```python
# Copyright (c) 2013, Core Initiative and contributors
# For license information, please see license.txt

from __future__ import unicode_literals

import frappe
from datetime import datetime


def execute(filters=None):
    columns = [
        {
            'fieldname': 'date',
            'label': 'Date',
            'fieldtype': 'Date',
        },
        {
            'fieldname': 'room',
            'label': 'Room',
            'fieldtype': 'Link',
            'options': 'Inn Room',
        },
        {
            'fieldname': 'type',
            'label': 'Type',
            'fieldtype': 'Link',
            'options': 'Inn Room Type',
        },
        {
            'fieldname': 'system_fo',
            'label': 'System FO',
            'fieldtype': 'Data',
        },
        {
            'fieldname': 'actual_hk',
            'label': 'Actual HK',
            'fieldtype': 'Data',
        },
    ]
    data = get_data()
    print(data)
    return columns, data


def get_rooms():
    return frappe.db.sql("""
        select number as room, room_type as type, room_status as system_fo
        from `tabInn Room`
        order by number""", as_dict=True)


def get_data():
    rooms = get_rooms()
    now = datetime.date(datetime.now())
    for room in rooms:
        room['date'] = now
        words = room['system_fo'].split()
        letters = [word[0] for word in words]
        room['system_fo'] = "".join(letters)
    return rooms
```

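For illustration, `get_data()` above replaces each room's `system_fo` status with the first letter of every word in it. A standalone sketch of that same abbreviation step is below; the sample statuses are assumptions for the example, not values taken from the source.

```python
# Standalone sketch of the system_fo abbreviation performed in get_data():
# keep the first letter of each word in the status string.
def abbreviate(status):
    return "".join(word[0] for word in status.split())

print(abbreviate("Vacant Clean"))    # -> "VC"  (sample status, assumed)
print(abbreviate("Occupied Dirty"))  # -> "OD"  (sample status, assumed)
```
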
```python
from time import sleep

from pitop import TiltRollHeadController

# Create a head controller object
head = TiltRollHeadController()

# Initialize the servo angles
head.roll.target_angle = 0
head.tilt.target_angle = 50
sleep(1)

# Nod 6 times at max speed 5 degrees either side of current angle.
# Blocks program execution until finished.
head.nod(times=6, angle=5, speed=100, block=True)

# Shake 4 times at half speed 10 degrees either side of current angle.
# Blocks program execution until finished.
head.shake(times=4, angle=10, speed=50, block=True)

# Shake and nod at the same time with default speed and angle
# Setting nod with block=False ensures the program continues to the next command
head.nod(times=6, block=False)
head.shake(times=6, block=True)
```

```python
import sys
sys.path.insert(0, '../models')
from get_data import GetData
# from python.ultilities.get_data import GetData
import unittest
import csv


class TestGetData(unittest.TestCase):
    def test_getAllFeatures1(self):
        getData = GetData()
        features = getData.getAllFeatures()
        self.assertIsNotNone(features)

    def test_getAllFeatures2(self):
        getData = GetData(101)
        features = getData.getAllFeatures()
        self.assertIsNotNone(features)
        self.assertEqual(len(features), 100)

    def test_getAllFeatures3(self):
        getData = GetData(5)
        features = getData.getAllFeatures('open', 'close')
        self.assertIsNotNone(features)
        self.assertEqual(len(features[0][0]), 2)


if __name__ == '__main__':
    unittest.main()
```

```python
# pylint: disable=missing-docstring
from __future__ import print_function

from mock import patch

from gitflow_easyrelease import cli


@patch('gitflow_easyrelease.cli_file.ColorOutput')
@patch('gitflow_easyrelease.cli_file.Subcommand')
@patch('gitflow_easyrelease.cli_file.Application')
def test_execution(mock_app, mock_sub, mock_color):
    mock_color.assert_not_called()
    mock_sub.assert_not_called()
    mock_app.assert_not_called()
    cli()
    mock_color.assert_called_once()
    assert 1 <= mock_sub.call_count
    mock_app.assert_called_once()
```

```python
'''
| Title:       command
| Author:      <NAME>
| Email:       <EMAIL>
| Description: executes a command as a subprocess
|
| GrimHacker
| grimhacker.com
| @_grimhacker
| --------------------------------------------------------------------------------

Created on 22 Sep 2013

@author: GrimHacker
'''
import logging
from subprocess import CalledProcessError

from lib.async_subprocess import AsyncPopen, PIPE


class Command():
    def __init__(self):
        self.log = logging.getLogger(__name__)

    def _stdout(self, out):
        """ print line from stdout of executed command """
        for line in out.split("\n"):
            if line != "":  # output anything that isn't a blank line
                self.log.info("{0}".format(line))

    def _stderr(self, err):
        """ print line from stderr of executed command """
        for line in err.split("\n"):
            if line != "":  # output anything that isn't a blank line
                self.log.warning("{0}".format(line))

    def _execute(self, cmd):
        """ run the specified command as a subprocess """
        self.log.debug("running: '{0}'".format(cmd))
        try:
            proc = AsyncPopen(cmd, stdout=PIPE, stderr=PIPE)
            while proc.poll() is None:  # while subprocess hasn't finished
                out, err = proc.communicate("s")
                if err is not None:
                    self._stderr(err)
                if out is not None:
                    self._stdout(out)
                # line = proc.stdout.readline().strip("\n")
                # self._stdout(line)
                # line = proc.stderr.readline().strip("\n")  # waits for stderr.
                # TODO: need to put this in a thread
                # self._stderr(line)
            # when we get to here the subprocess has finished running
        except CalledProcessError, e:
            self.log.error("{0}: {1}".format(e.errno, e.strerror))
            # return "{0}: {1}".format(e.errno, e.strerror)
```

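A minimal usage sketch for the class above, assuming `lib.async_subprocess` is importable and that `_execute` is driven directly; the command string and the logging setup are illustrative assumptions, not part of the source.

```python
# Hypothetical driver for Command (assumes lib.async_subprocess is on the path).
import logging

logging.basicConfig(level=logging.DEBUG)

runner = Command()
runner._execute("echo hello")  # stdout lines are logged at INFO, stderr at WARNING
```
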
```python
import os

from flask import Flask, render_template, request, json

app = Flask(__name__)


@app.route('/')
def index():
    return render_template('index.html')


@app.route('/save', methods=['POST'])
def save():
    with open('data.json', 'w+') as f:
        f.write(json.dumps(request.get_json()))
    return ''


@app.route('/load')
def load():
    result = '{ "markers": [] }'
    if os.path.isfile('data.json'):
        with open('data.json', 'r') as f:
            result = f.read()
    return json.jsonify(result)


if __name__ == '__main__':
    app.run()
```
"save(): with open('data.json', 'w+') as f: f.write(json.dumps(request.get_json())) return '' @app.route('/load') def load(): result"
"from __future__ import print_function class Metrics(object): def scalar(self, name, y, x=None): raise NotImplementedError"
"for i in url: m = i.url data = { \"url\": m, \"agg\":",
"DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url: m =",
"agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in",
"= Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct() url",
"def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q = self.userSitesDay.aggregate(Sum('dailyTime'))",
"self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m =",
"TimeTracker.models import Sites from datetime import datetime from .dataFunc import DataInstruct class Aggregators():",
".dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date =",
"i in url: m = i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)])",
"max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return",
"url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url: m = i.url",
"self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def",
"self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct()",
"= self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min')",
"self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg):",
"\"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max')",
"} return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self):",
"Sites from datetime import datetime from .dataFunc import DataInstruct class Aggregators(): def __init__(self,",
"import Sites from datetime import datetime from .dataFunc import DataInstruct class Aggregators(): def",
"= self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg')",
"import Sum, Avg, Max, Min from TimeTracker.models import Sites from datetime import datetime",
"return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q",
"= self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url: m = i.url data",
"def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime'))",
"q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q = self.userSitesDay.aggregate(Sum('dailyTime')) return self.get_data(q,",
"q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q,",
"datetime import datetime from .dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id): self.now",
"self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q",
"data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q =",
"m = i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data",
"'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q =",
"time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def",
"DataInstruct class Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites",
"from datetime import datetime from .dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id):",
"self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)])",
"django.db.models import Sum, Avg, Max, Min from TimeTracker.models import Sites from datetime import",
"return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self):",
"from django.db.models import Sum, Avg, Max, Min from TimeTracker.models import Sites from datetime",
"= self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time",
"m = \"\" for i in url: m = i.url data = {",
"in url: m = i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) }",
"average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q = self.userSitesDay.aggregate(Sum('dailyTime')) return",
"= \"\" for i in url: m = i.url data = { \"url\":",
"import datetime from .dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id): self.now =",
"data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q",
"= { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q =",
"self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def",
"= self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m",
"query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i",
"Min from TimeTracker.models import Sites from datetime import datetime from .dataFunc import DataInstruct",
"min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return",
"= self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q = self.userSitesDay.aggregate(Sum('dailyTime')) return self.get_data(q, 'sum')",
"def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime'))",
"Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time = DataInstruct() url =",
"Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all()",
"self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def",
"= i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def",
"from TimeTracker.models import Sites from datetime import datetime from .dataFunc import DataInstruct class",
"time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url:",
"def __init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay",
"self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query, agg): time =",
"datetime from .dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id): self.now = datetime.now()",
"def get_data(self, query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\"",
"self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self): q",
"Sum, Avg, Max, Min from TimeTracker.models import Sites from datetime import datetime from",
"\"\" for i in url: m = i.url data = { \"url\": m,",
"= DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url: m",
"class Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites =",
"url: m = i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return",
"user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all()",
"{ \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime'))",
"import DataInstruct class Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\")",
"Max, Min from TimeTracker.models import Sites from datetime import datetime from .dataFunc import",
"q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q,",
"from .dataFunc import DataInstruct class Aggregators(): def __init__(self, user_id): self.now = datetime.now() self.date",
"__init__(self, user_id): self.now = datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay =",
"<reponame>Ashkan-Agc/web-time-tracker<gh_stars>1-10 from django.db.models import Sum, Avg, Max, Min from TimeTracker.models import Sites from",
"return self.get_data(q, 'max') def min(self): q = self.userSitesDay.aggregate(Min('dailyTime')) return self.get_data(q, 'min') def average(self):",
"datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self, query,",
"m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return self.get_data(q,",
"i.url data = { \"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self):",
"Avg, Max, Min from TimeTracker.models import Sites from datetime import datetime from .dataFunc",
"'min') def average(self): q = self.userSitesDay.aggregate(Avg('dailyTime')) return self.get_data(q, 'avg') def sum(self): q =",
"self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for i in url: m = i.url data =",
"= datetime.now() self.date = self.now.strftime(\"%Y-%m-%d\") self.userSites = Sites.objects.filter(user_id=user_id).all() self.userSitesDay = self.userSites.filter(date=self.date).all() def get_data(self,",
"\"url\": m, \"agg\": time.convert_to_time(query['dailyTime__{}'.format(agg)]) } return data def max(self): q = self.userSitesDay.aggregate(Max('dailyTime')) return",
"get_data(self, query, agg): time = DataInstruct() url = self.userSitesDay.filter(time=query['dailyTime__{}'.format(agg)]) m = \"\" for"
"**kwargs): loss, timestamps, grads = trace if abscissa == 'time': x = timestamps",
"grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads = tf.math.log(grads) loss =",
"= array[:, 0] Y = array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes,",
"y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob,",
"ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n,",
"Axes. name: Python `str` name dataset name for title display. name: Iterable of",
"Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r == 12:",
"polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2)",
"][i % 9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension",
"index is the dimension index. ax: Matplotlib Axes. If None, one would be",
"an array. Args: array: 2D array. The right-most index is the dimension index.",
"not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not",
"minima = min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper =",
"len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI',",
"alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if abscissa == 'time': x",
"edgecolor = '#CC4F1B' alpha = .6 if log_scale is True: logmean = tf.math.log(smoothed_mean)",
"= 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean,",
"gradylimit is not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k !=",
"log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if abscissa",
"`tuple` for xlim and ylim for each plot in Axes. name: Python `str`",
"num_burnin_steps: Python `int`: Number of step to ignore for display. \"\"\" for k,",
"dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and",
"for r in range(len(vor.point_region)): if r == 12: pass else: region = vor.regions[vor.point_region[r]]",
"from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors",
"gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red',",
"vor.regions[vor.point_region[r]] if not -1 in region: polygon = [vor.vertices[i] for i in region]",
"used in TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange = '#FAA43A'",
"= axes[line, k] gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v'",
"0] Y = array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None,",
"'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if",
"log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm,",
"= tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]),",
"x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def",
"log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax,",
"grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs):",
"for xlim and ylim for each plot in Axes. name: Python `str` name",
"== 'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is",
"or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs)",
"xlabel = None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is",
"not -1 in region: polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r]))",
"gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if",
"prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count *",
"the dimension index. ax: Matplotlib Axes. If None, one would be created. kwargs:",
"in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k] gradax",
"display. name: Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`:",
"savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if",
"smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha =",
"mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2)",
"orange = '#FAA43A' green = '#60BD68' pink = '#F17CB0' brown = '#B2912F' purple",
"scatter plot. ''' ax = plt.subplot() if ax is None else ax X",
"np import matplotlib.pyplot as plt from matplotlib import cm from matplotlib import colors",
"name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in vi_type:",
"= '#CC4F1B' alpha = .6 if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance",
"ylim for each plot in Axes. name: Python `str` name dataset name for",
"in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple',",
"be created. kwargs: key word argument for the scatter plot. ''' ax =",
"vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all'",
"Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or",
"name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in vi_type:",
"xlabel = 'iterations' xlabel = None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog')",
"= '^' every = 70 if 'all' in vi_type or 'rqvi' in vi_type:",
"colors used in TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange =",
"rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self, i): return [ self.red, self.orange,",
"num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments: Iterable of",
"if r == 12: pass else: region = vor.regions[vor.point_region[r]] if not -1 in",
"= 70 if 'all' in vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces,",
"= prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5,",
"def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads =",
"alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads",
"of step to ignore for display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda",
"coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed",
"= '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self, i): return [",
"pink = '#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray",
"abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc' in",
"as plt from matplotlib import cm from matplotlib import colors from scipy.spatial import",
"instances. axes: Matplotlib Axe. dataset: Dataset Number. Must be comptatible with axes. limites:",
"`time` or `epochs`. num_burnin_steps: Python `int`: Number of step to ignore for display.",
"for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax =",
"vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if",
"if gradylimit is not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k",
"= .6 if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x,",
"if 'all' in vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI',",
"plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in",
"elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0,",
"region: polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko',",
"abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in",
"is not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is",
"limites: Python `tuple` for xlim and ylim for each plot in Axes. name:",
"RQVI Experiment Args: experiments: Iterable of Experiment instances. axes: Matplotlib Axe. dataset: Dataset",
"ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin,",
"coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance,",
"`str` name dataset name for title display. name: Iterable of Python `str`: 'All',",
"import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object):",
"numpy as np import matplotlib.pyplot as plt from matplotlib import cm from matplotlib",
"of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`.",
"ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance,",
"lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color)",
"show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r == 12: pass else:",
"0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0:",
"tensorflow as tf import numpy as np import matplotlib.pyplot as plt from matplotlib",
"i): return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i",
"the scatter plot. ''' ax = plt.subplot() if ax is None else ax",
"mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count)",
"gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces,",
"None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None:",
"0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count *",
"== 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time',",
"log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x",
"title display. name: Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python",
"True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs)",
"0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax])",
"k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace,",
"voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r == 12: pass",
"name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time':",
"name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in vi_type:",
"and ylim for each plot in Axes. name: Python `str` name dataset name",
"tf import numpy as np import matplotlib.pyplot as plt from matplotlib import cm",
"x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads",
"log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in vi_type: plot(elboax,",
"matplotlib.pyplot as plt from matplotlib import cm from matplotlib import colors from scipy.spatial",
"alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights",
"norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps",
"c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in vi_type: plot(elboax, gradax,",
"= tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean,",
"= 'iterations' xlabel = None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if",
"k] gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls =",
"mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25,",
"purple = '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc",
"'#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self, i): return [ self.red,",
"tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance =",
"prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count",
"ignore for display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line",
"Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)' elif",
"num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x =",
"= max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor =",
"'all' in vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False,",
"def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights minima = min(speed)",
"right-most index is the dimension index. ax: Matplotlib Axes. If None, one would",
"VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)'",
"plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2,",
"if ax is None else ax X = array[:, 0] Y = array[:,",
"is None else ax X = array[:, 0] Y = array[:, 1] ax.scatter(X,",
"to ignore for display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])):",
"''' ax = plt.subplot() if ax is None else ax X = array[:,",
"[ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9]",
"'time': xlabel = 'time(s)' elif abscissa == 'epochs': xlabel = 'iterations' xlabel =",
"'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs)",
"qmc_ls = 'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls = '.' mc_ls",
"'' rqvi_ls = '.' mc_ls = '^' every = 70 if 'all' in",
"_TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\" red = '#F15854' blue =",
"abscissa == 'epochs': xlabel = 'iterations' xlabel = None else: xlabel = None",
"or `epochs`. num_burnin_steps: Python `int`: Number of step to ignore for display. \"\"\"",
"every = 70 if 'all' in vi_type or 'rqvi' in vi_type: plot(elboax, gradax,",
"= compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa",
"elif abscissa == 'epochs': xlabel = 'iterations' xlabel = None else: xlabel =",
"edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights minima",
"[-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy()",
"'#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c'",
"in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all'",
"else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def",
"in vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps,",
"which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name)",
"Experiment Args: experiments: Iterable of Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number.",
"min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds)",
"in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all'",
"= np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads =",
"= '#60BD68' pink = '#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow =",
"timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm",
"if not -1 in region: polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon),",
"of Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number. Must be comptatible with",
"tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color,",
"logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label,",
"region = vor.regions[vor.point_region[r]] if not -1 in region: polygon = [vor.vertices[i] for i",
"0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75,",
"in vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi,",
"xlim and ylim for each plot in Axes. name: Python `str` name dataset",
"C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25,",
"\"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax",
"if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if",
"norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments: Iterable of Experiment",
"matplotlib import cm from matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d from",
"in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if",
"== 'epochs': xlabel = 'iterations' xlabel = None else: xlabel = None gradax.set_xlabel(xlabel)",
"plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count *",
"limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI",
"`str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number of step to ignore for",
"in vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi,",
"ax is None else ax X = array[:, 0] Y = array[:, 1]",
"ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim, _",
"concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count *",
"plot of an array. Args: array: 2D array. The right-most index is the",
"(mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75,",
"vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True,",
"length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) +",
"'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None):",
"norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor,",
"Matplotlib Axe. dataset: Dataset Number. Must be comptatible with axes. limites: Python `tuple`",
"gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls = ''",
"Python `int`: Number of step to ignore for display. \"\"\" for k, experiment",
"qvi = '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self, i): return",
"dataset: Dataset Number. Must be comptatible with axes. limites: Python `tuple` for xlim",
"abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa ==",
"'#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D'",
"gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim, ylim = limits elboax.set_xlim(xlim)",
"color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0,",
"xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim,",
"\"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments: Iterable of Experiment instances. axes:",
"Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number. Must be comptatible with axes.",
"dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args:",
"for each plot in Axes. name: Python `str` name dataset name for title",
"speed = qdist.weights minima = min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima,",
"rqmc = '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def",
"enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k] gradax =",
"c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax,",
"gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO')",
"len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads = tf.math.log(grads) loss",
"plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI,",
"region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0,",
"index. ax: Matplotlib Axes. If None, one would be created. kwargs: key word",
"array. The right-most index is the dimension index. ax: Matplotlib Axes. If None,",
"mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r",
"ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])]))",
"qdist.weights minima = min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper",
"norm=norm,**kwargs) if 'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa,",
"if abscissa == 'time': xlabel = 'time(s)' elif abscissa == 'epochs': xlabel =",
"clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for",
"gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or",
"abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x",
"gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format(",
"mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var =",
"axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance,",
"label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps,",
"gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment",
"line = 2*dataset elboax = axes[line, k] gradax = axes[line+1, k] qmc_ls =",
"abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number of step to",
"Two dimension plot of an array. Args: array: 2D array. The right-most index",
"X = array[:, 0] Y = array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments,",
"= Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r ==",
"r in range(len(vor.point_region)): if r == 12: pass else: region = vor.regions[vor.point_region[r]] if",
"= tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0],",
"plot in Axes. name: Python `str` name dataset name for title display. name:",
"in TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange = '#FAA43A' green",
"= qdist.weights minima = min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True)",
"name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel =",
"colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import",
"elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax,",
"self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor",
"facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights minima =",
"ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean",
"Axes. If None, one would be created. kwargs: key word argument for the",
"mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob",
"ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x",
"gradax.set_yscale('symlog') if limits is not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim)",
"trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads =",
"'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if",
"name: Python `str` name dataset name for title display. name: Iterable of Python",
"label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean =",
"markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure()",
"edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs)",
"abscissa == 'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale",
"The right-most index is the dimension index. ax: Matplotlib Axes. If None, one",
"log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax,",
"'^' every = 70 if 'all' in vi_type or 'rqvi' in vi_type: plot(elboax,",
"in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist,",
"rqmc_ls = 'v' qvi_ls = '' rqvi_ls = '.' mc_ls = '^' every",
"0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax])",
"ax: Matplotlib Axes. If None, one would be created. kwargs: key word argument",
"gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type",
"qvi_ls = '' rqvi_ls = '.' mc_ls = '^' every = 70 if",
"xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y =",
"ax = plt.subplot() if ax is None else ax X = array[:, 0]",
"axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls =",
"def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss,",
"**kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments: Iterable of Experiment instances.",
"grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:],",
"= tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance",
"norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance",
"abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1,",
"c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''):",
"scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used",
"cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)):",
"`str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python",
"= np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:],",
"Iterable of Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number. Must be comptatible",
"import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if log_scale is True:",
"TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension plot of an",
"import tensorflow as tf import numpy as np import matplotlib.pyplot as plt from",
"qdist.sample(n) speed = qdist.weights minima = min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima,",
"log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in vi_type: plot(elboax, gradax,",
"range(len(vor.point_region)): if r == 12: pass else: region = vor.regions[vor.point_region[r]] if not -1",
"ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x,",
"vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r",
"name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if",
"abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel",
"* 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2,",
"import matplotlib.pyplot as plt from matplotlib import cm from matplotlib import colors from",
"class _TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\" red = '#F15854' blue",
"Number. Must be comptatible with axes. limites: Python `tuple` for xlim and ylim",
"= cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in",
"tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y)",
"= '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi =",
"self.gray, ][i % 9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two",
"Y = array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None,",
"_ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both',",
"gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both',",
"c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time',",
"* 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if name:",
"= '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc =",
"'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0,",
"from matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter",
"0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if name: plt.savefig(name",
"== 12: pass else: region = vor.regions[vor.point_region[r]] if not -1 in region: polygon",
"concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow')",
"if 'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa,",
"= '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi =",
"def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting",
"self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor = _TFColor() def",
"window_length_mean = 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm,",
"norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every,",
"num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if abscissa == 'time':",
"smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace()",
"x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y =",
"def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var",
"<gh_stars>0 import tensorflow as tf import numpy as np import matplotlib.pyplot as plt",
"loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces,",
"else: region = vor.regions[vor.point_region[r]] if not -1 in region: polygon = [vor.vertices[i] for",
"log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51",
"'.' mc_ls = '^' every = 70 if 'all' in vi_type or 'rqvi'",
"return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i %",
"display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset",
"= '#d7191c' def __getitem__(self, i): return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown,",
"= '#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray =",
"for the scatter plot. ''' ax = plt.subplot() if ax is None else",
"plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type",
"mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if name: plt.savefig(name +",
"maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor",
"51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean,",
"elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate']))",
"e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k] gradax = axes[line+1, k]",
"'all' in vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False,",
"__getitem__(self, i): return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray,",
"elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim)",
"coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None,",
"''' Two dimension plot of an array. Args: array: 2D array. The right-most",
"xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim, ylim",
"'#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self, i):",
"be comptatible with axes. limites: Python `tuple` for xlim and ylim for each",
"Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number of step to ignore",
"self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor =",
"= tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x,",
"plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa",
"plot. ''' ax = plt.subplot() if ax is None else ax X =",
"facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x,",
"color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red',",
"tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import",
"from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\"",
"x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs):",
"array: 2D array. The right-most index is the dimension index. ax: Matplotlib Axes.",
"plt from matplotlib import cm from matplotlib import colors from scipy.spatial import Voronoi,",
"if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance),",
"Axe. dataset: Dataset Number. Must be comptatible with axes. limites: Python `tuple` for",
"limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim, _ = limits",
"length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax,",
"self.yellow, self.gray, ][i % 9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): '''",
"'all' in vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False,",
"vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in",
"= tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3)",
"c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha,",
"== 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color,",
"'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number",
"c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses,",
"norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every,",
"axes[line, k] gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls",
"savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in TF",
"== 'time': xlabel = 'time(s)' elif abscissa == 'epochs': xlabel = 'iterations' xlabel",
"= None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not",
"'#abd9e9' mc = '#d7191c' def __getitem__(self, i): return [ self.red, self.orange, self.green, self.blue,",
"cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if",
"= tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs)",
"ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n)",
"None else ax X = array[:, 0] Y = array[:, 1] ax.scatter(X, Y,",
"dataset name for title display. name: Iterable of Python `str`: 'All', 'mc', 'QVI',",
"not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[])",
"Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum",
"e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k] gradax = axes[line+1, k] qmc_ls",
"name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc' in vi_type:",
"mc_ls = '^' every = 70 if 'all' in vi_type or 'rqvi' in",
"[-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count,",
"'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC",
"key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k] gradax = axes[line+1,",
"* 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count",
"in region: polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1],",
"log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)' elif abscissa",
"'#CC4F1B' alpha = .6 if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance =",
"[vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False)",
"show_vertices=False, s=1,ax=ax) for r in range(len(vor.point_region)): if r == 12: pass else: region",
"timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI',",
"axis=-1)/norm if log_scale is True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], -",
"= plt.subplot() if ax is None else ax X = array[:, 0] Y",
"- timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax,",
"compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\" red = '#F15854'",
"= 'v' qvi_ls = '' rqvi_ls = '.' mc_ls = '^' every =",
"vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps,",
"multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in vi_type: plot(elboax,",
"= '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc =",
"plt.subplot() if ax is None else ax X = array[:, 0] Y =",
"= vor.regions[vor.point_region[r]] if not -1 in region: polygon = [vor.vertices[i] for i in",
"tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c,",
"ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0,",
"q_samples = qdist.sample(n) speed = qdist.weights minima = min(speed) maxima = max(speed) norm",
"compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa ==",
"* 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2,",
"\"\"\"Enum of colors used in TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA'",
"= '#abd9e9' mc = '#d7191c' def __getitem__(self, i): return [ self.red, self.orange, self.green,",
"= '#F15854' blue = '#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink =",
"= '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc = '#d7191c' def __getitem__(self,",
"dimension plot of an array. Args: array: 2D array. The right-most index is",
"= tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads = tf.math.log(grads) loss = tf.math.log(loss)",
"of an array. Args: array: 2D array. The right-most index is the dimension",
"QVI and RQVI Experiment Args: experiments: Iterable of Experiment instances. axes: Matplotlib Axe.",
"mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs)",
"import cm from matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal",
"plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y",
"timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps]",
"alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False,",
"ax X = array[:, 0] Y = array[:, 1] ax.scatter(X, Y, **kwargs) def",
"None, one would be created. kwargs: key word argument for the scatter plot.",
"= 'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls = '.' mc_ls =",
"timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:],",
"red = '#F15854' blue = '#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink",
"vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True,",
"c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax,",
"name: Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time`",
"= None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim, ylim =",
"k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k",
"key word argument for the scatter plot. ''' ax = plt.subplot() if ax",
"= savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor",
"is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor,",
"np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax,",
"in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple',",
"elif abscissa == 'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if",
"coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax,",
"_TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension plot of an array. Args:",
"brown = '#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc",
"is True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name,",
"alpha = .6 if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean",
"mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0,",
"elboax = axes[line, k] gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls =",
"logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x,",
"#pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if log_scale is True: logmean =",
"Must be comptatible with axes. limites: Python `tuple` for xlim and ylim for",
"if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax,",
"window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean =",
"ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights minima = min(speed) maxima =",
"= '#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc =",
"self.purple, self.yellow, self.gray, ][i % 9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs):",
"2D array. The right-most index is the dimension index. ax: Matplotlib Axes. If",
"array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'],",
"cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count",
"**kwargs): ''' Two dimension plot of an array. Args: array: 2D array. The",
"= colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False,",
"pass else: region = vor.regions[vor.point_region[r]] if not -1 in region: polygon = [vor.vertices[i]",
"in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all'",
"'v' qvi_ls = '' rqvi_ls = '.' mc_ls = '^' every = 70",
"* 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count",
"= tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:],",
"for display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line =",
"yellow = '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi",
"1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs',",
"xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count *",
"smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else:",
"= smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs)",
"ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin,",
"qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\" red",
"x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(loss))",
"def __getitem__(self, i): return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple, self.yellow,",
"s=1,ax=ax) for r in range(len(vor.point_region)): if r == 12: pass else: region =",
"losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps -",
"elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False,",
"label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples",
"'#F15854' blue = '#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink = '#F17CB0'",
"gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or",
"mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1,",
"= min(speed) maxima = max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm,",
"def plot2d(array, ax=None, **kwargs): ''' Two dimension plot of an array. Args: array:",
"12: pass else: region = vor.regions[vor.point_region[r]] if not -1 in region: polygon =",
"None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim, ylim = limits",
"as np import matplotlib.pyplot as plt from matplotlib import cm from matplotlib import",
"with axes. limites: Python `tuple` for xlim and ylim for each plot in",
"limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both',",
"max(speed) norm = colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy())",
"abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in",
"x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51",
"Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1,",
"variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var,",
"**kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs):",
"ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean",
"= axes[line+1, k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls",
"plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2,",
"Python `tuple` for xlim and ylim for each plot in Axes. name: Python",
"gray = '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi",
"abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments: Iterable",
"mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if",
"= dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\")",
"lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance =",
"ax=None, **kwargs): ''' Two dimension plot of an array. Args: array: 2D array.",
"'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if",
"step to ignore for display. \"\"\" for k, experiment in enumerate(sorted(experiments, key=lambda e:",
"and RQVI Experiment Args: experiments: Iterable of Experiment instances. axes: Matplotlib Axe. dataset:",
"loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name,",
"mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin, ymin/2, 0,",
"comptatible with axes. limites: Python `tuple` for xlim and ylim for each plot",
"dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0,",
"elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim, _ = limits gradax.set_xlim(xlim)",
"if log_scale is True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:],",
"0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count *",
"alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label,",
"in range(len(vor.point_region)): if r == 12: pass else: region = vor.regions[vor.point_region[r]] if not",
"would be created. kwargs: key word argument for the scatter plot. ''' ax",
"coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean,",
"tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob,",
"plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0,",
"= '.' mc_ls = '^' every = 70 if 'all' in vi_type or",
"vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6,",
"tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:],",
"= trace if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa",
"lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples =",
"or 'rqmc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps,",
"vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False, s=1,ax=ax)",
"tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob =",
"each plot in Axes. name: Python `str` name dataset name for title display.",
"xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates",
"If None, one would be created. kwargs: key word argument for the scatter",
"i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def",
"'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number of",
"abscissa == 'time': xlabel = 'time(s)' elif abscissa == 'epochs': xlabel = 'iterations'",
"axes: Matplotlib Axe. dataset: Dataset Number. Must be comptatible with axes. limites: Python",
"blue = '#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink = '#F17CB0' brown",
"experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc'",
"abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in",
"num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every,",
"Dataset Number. Must be comptatible with axes. limites: Python `tuple` for xlim and",
"argument for the scatter plot. ''' ax = plt.subplot() if ax is None",
"#plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count",
"= [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False)",
"Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps:",
"'#d7191c' def __getitem__(self, i): return [ self.red, self.orange, self.green, self.blue, self.pink, self.brown, self.purple,",
"MCVI, QVI and RQVI Experiment Args: experiments: Iterable of Experiment instances. axes: Matplotlib",
"None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[])",
"[xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count",
"experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc'",
"word argument for the scatter plot. ''' ax = plt.subplot() if ax is",
"ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts,",
"import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in TF docs.\"\"\" red =",
"experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time':",
"'#B2912F' purple = '#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c'",
"prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob = prob.numpy() plt.imshow(tf.transpose(tf.reshape(prob, (mesh_count, mesh_count))),",
"- timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads),",
"Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates)",
"= timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:],",
"0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if name: plt.savefig(name + \".png\", format=\"png\")",
"'#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink = '#F17CB0' brown = '#B2912F'",
"= '' rqvi_ls = '.' mc_ls = '^' every = 70 if 'all'",
"plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type",
"traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces) if abscissa",
"import colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments",
"coeff=1, norm=1, lw=.7,**kwargs): window_length_mean = 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1)",
"= 2*dataset elboax = axes[line, k] gradax = axes[line+1, k] qmc_ls = 'x'",
"'All', 'mc', 'QVI', 'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`:",
"coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color) ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color,",
"= 'time(s)' elif abscissa == 'epochs': xlabel = 'iterations' xlabel = None else:",
"gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if abscissa == 'time': x =",
"Number of step to ignore for display. \"\"\" for k, experiment in enumerate(sorted(experiments,",
"if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs':",
"plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps,",
"True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha, edgecolor=edgecolor, facecolor=color)",
"vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in",
"color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi'",
".6 if log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance),",
"one would be created. kwargs: key word argument for the scatter plot. '''",
"axis=-1) smoothed_mean = savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb",
"x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(losses))",
"plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type",
"savgol_filter(mean, window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor =",
"9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension plot of",
"abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace",
"name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X,",
"'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs)",
"label=label, lw=lw,**kwargs) else: ax.plot(x, coeff*smoothed_mean, c=color, label=label, lw=lw,**kwargs) ax.fill_between(x, coeff*smoothed_mean-smoothed_variance, coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor,",
"array[:, 0] Y = array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset,",
"from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning",
"axes. limites: Python `tuple` for xlim and ylim for each plot in Axes.",
"'time(s)' elif abscissa == 'epochs': xlabel = 'iterations' xlabel = None else: xlabel",
"self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor = _TFColor() def plot2d(array,",
"X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob =",
"Python `str` name dataset name for title display. name: Iterable of Python `str`:",
"experiments: Iterable of Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number. Must be",
"name dataset name for title display. name: Iterable of Python `str`: 'All', 'mc',",
"window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if log_scale",
"voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of",
"Args: array: 2D array. The right-most index is the dimension index. ax: Matplotlib",
"xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y",
"`int`: Number of step to ignore for display. \"\"\" for k, experiment in",
"is not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0:",
"import numpy as np import matplotlib.pyplot as plt from matplotlib import cm from",
"'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls = '.' mc_ls = '^'",
"mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0,",
"* 0.75, mesh_count], [ymin, ymin/2, 0, ymax/2, ymax]) if name: plt.savefig(name + \".png\",",
"log_scale is True: logmean = tf.math.log(smoothed_mean) logvariance = smoothed_variance/smoothed_mean ax.fill_between(x, coeff*tf.math.exp(logmean-logvariance), coeff*tf.math.exp(logmean+logvariance), alpha=alpha,",
"% 9] TFColor = _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension plot",
"or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI', log_scale=False, c=TFColor.rqvi, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs)",
"xlabel = 'time(s)' elif abscissa == 'epochs': xlabel = 'iterations' xlabel = None",
"origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [xmin,",
"else ax X = array[:, 0] Y = array[:, 1] ax.scatter(X, Y, **kwargs)",
"mesh_count], [xmin, xmin/2, 0, xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5,",
"pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if log_scale is True: logmean",
"which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def",
"+ r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1,",
"or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs)",
"mc = '#d7191c' def __getitem__(self, i): return [ self.red, self.orange, self.green, self.blue, self.pink,",
"in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[])",
"None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None:",
"- loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax,",
"k, experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line,",
"import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class _TFColor(object): \"\"\"Enum of colors used in",
"= limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim, _ =",
"grads = compute_traces_from_multiple_trainning(traces) if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif",
"plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count)",
"qmc = '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9' mc",
"= '#5DA5DA' orange = '#FAA43A' green = '#60BD68' pink = '#F17CB0' brown =",
"elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k == 0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$')",
"= _TFColor() def plot2d(array, ax=None, **kwargs): ''' Two dimension plot of an array.",
"Matplotlib Axes. If None, one would be created. kwargs: key word argument for",
"tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c, label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def",
"tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1],",
"'epochs': x = np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True:",
"plot2d(array, ax=None, **kwargs): ''' Two dimension plot of an array. Args: array: 2D",
"dimension index. ax: Matplotlib Axes. If None, one would be created. kwargs: key",
"'epochs': xlabel = 'iterations' xlabel = None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog')",
"xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count], [ymin,",
"`epochs`. num_burnin_steps: Python `int`: Number of step to ignore for display. \"\"\" for",
"x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:],",
"tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X,",
"'#FAA43A' green = '#60BD68' pink = '#F17CB0' brown = '#B2912F' purple = '#B276B2'",
"self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor = _TFColor() def plot2d(array, ax=None,",
"docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange = '#FAA43A' green = '#60BD68'",
"in Axes. name: Python `str` name dataset name for title display. name: Iterable",
"2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha",
"experiment.rqmcvitraces, color=TFColor.rqmc,marker=rqmc_ls,markevery=every, abscissa=abscissa, name='RQMC multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or",
"colors.Normalize(vmin=minima, vmax=maxima, clip=True) mapper = cm.ScalarMappable(norm=norm, cmap=cm.Reds) vor = Voronoi(q_samples.numpy()) voronoi_plot_2d(vor, show_points=False, show_vertices=False,",
"'#B276B2' yellow = '#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61'",
"'#60BD68' pink = '#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow = '#DECF3F'",
"gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0)",
"q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000,",
"window_length_mean, 2) smoothed_variance = savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B'",
"0: elboax.set_ylabel('ELBO') gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='',",
"70 if 'all' in vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every,",
"vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6,",
"title=''): q_samples = qdist.sample(n) speed = qdist.weights minima = min(speed) maxima = max(speed)",
"label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1, norm=1, lw=.7,**kwargs): window_length_mean =",
"axes, dataset, limits=None, gradylimit=None, dataset_name=None, vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI",
"mesh_count))), origin=\"lower\") plt.xticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count],",
"plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='', log_scale=False, num_burnin_steps=0,color='red', norm=1,**kwargs): losses, timestamps, grads = compute_traces_from_multiple_trainning(traces)",
"if 'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC",
"for title display. name: Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'. abscissa:",
"created. kwargs: key word argument for the scatter plot. ''' ax = plt.subplot()",
"if 'all' in vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI',",
"experiment in enumerate(sorted(experiments, key=lambda e: e.optimizer_params['learning_rate'])): line = 2*dataset elboax = axes[line, k]",
"num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces,",
"= timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(loss)) grads",
"TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange = '#FAA43A' green =",
"r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs):",
"norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x, ax, label=None, log_scale=False,color='red', coeff=1,",
"2*dataset elboax = axes[line, k] gradax = axes[line+1, k] qmc_ls = 'x' rqmc_ls",
"from matplotlib import cm from matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d",
"3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6 if log_scale is",
"!= 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0) elboax.tick_params(axis=u'both', which=u'both', length=0) if k ==",
"xmax/2, xmax]) plt.yticks([0, mesh_count * 0.25, mesh_count * 0.5, mesh_count * 0.75, mesh_count],",
"grads = trace if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif",
"c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads = trace if abscissa ==",
"of colors used in TF docs.\"\"\" red = '#F15854' blue = '#5DA5DA' orange",
"label='MCVI', log_scale=log_scale, coeff=-1, color=color, norm=1,**kwargs) mean_var_ts(ts=grads[num_burnin_steps:], x=x[num_burnin_steps:], ax=gradax, label='MCVI', log_scale=log_scale,color=color,norm=norm,**kwargs) def mean_var_ts(ts, x,",
"for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title)",
"vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every, name='QMCVI', log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in",
"k] qmc_ls = 'x' rqmc_ls = 'v' qvi_ls = '' rqvi_ls = '.'",
"'RQVI'. abscissa: Python `str`: `time` or `epochs`. num_burnin_steps: Python `int`: Number of step",
"xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y',",
"ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x =",
"== 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x =",
"self.green, self.blue, self.pink, self.brown, self.purple, self.yellow, self.gray, ][i % 9] TFColor = _TFColor()",
"= array[:, 1] ax.scatter(X, Y, **kwargs) def plot_experiments(experiments, axes, dataset, limits=None, gradylimit=None, dataset_name=None,",
"vi_type=['all'], abscissa='epochs', num_burnin_steps=0, norm=1, **kwargs): \"\"\"Plotting MCVI, QVI and RQVI Experiment Args: experiments:",
"= limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if k != 0: gradax.set_yticks(ticks=[]) elboax.set_yticks(ticks=[]) gradax.tick_params(axis=u'y', which=u'both', length=0)",
"num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) if 'all' in vi_type or 'qvi' in vi_type: plot(elboax, gradax, experiment.qvitraces,",
"def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax,",
"scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from qvi.core.experiments import compute_traces_from_multiple_trainning class",
"loss, timestamps, grads = trace if abscissa == 'time': x = timestamps -",
"gradax, experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or",
"label=name, alpha=alpha,**kwargs) gradax.plot(x[num_burnin_steps:], grads[num_burnin_steps:], c=c, label=name, alpha=gradaxalpha,**kwargs) def plot_multiple_traces(elboax, gradax, traces, abscissa='time', name='',",
"is the dimension index. ax: Matplotlib Axes. If None, one would be created.",
"= tf.transpose(tf.stack([tf.reshape(Y, [-1]), tf.reshape(X, [-1])])) prob = dist.prob(concatenated_mesh_coordinates) #plt.hexbin(concatenated_mesh_coordinates[:,0], concatenated_mesh_coordinates[:,1], C=prob, cmap='rainbow') prob",
"rqvi_ls = '.' mc_ls = '^' every = 70 if 'all' in vi_type",
"'iterations' xlabel = None else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits",
"mesh_count) y = tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates =",
"51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1) smoothed_mean",
"green = '#60BD68' pink = '#F17CB0' brown = '#B2912F' purple = '#B276B2' yellow",
"name for title display. name: Iterable of Python `str`: 'All', 'mc', 'QVI', 'RQVI'.",
"as tf import numpy as np import matplotlib.pyplot as plt from matplotlib import",
"'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs)",
"elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)' elif abscissa == 'epochs': xlabel",
"timestamps, grads = trace if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps]",
"= qdist.sample(n) speed = qdist.weights minima = min(speed) maxima = max(speed) norm =",
"trace if abscissa == 'time': x = timestamps - timestamps[num_burnin_steps] elif abscissa ==",
"'#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6' rqvi = '#abd9e9'",
"else: xlabel = None gradax.set_xlabel(xlabel) elboax.set_yscale('symlog') gradax.set_yscale('symlog') if limits is not None: xlim,",
"limits is not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit",
"-1 in region: polygon = [vor.vertices[i] for i in region] plt.fill(*zip(*polygon), color=mapper.to_rgba(speed[r])) ax.plot(q_samples[:,0],",
"'all' in vi_type or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI",
"norm=norm,**kwargs) if 'all' in vi_type or 'qmc' in vi_type: plot(elboax, gradax, experiment.qmcvitraces, abscissa=abscissa,marker=qmc_ls,markevery=every,",
"= savgol_filter(variance, window_length_var, 3) import pdb #pdb.set_trace() edgecolor = '#CC4F1B' alpha = .6",
"num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)' elif abscissa ==",
"Args: experiments: Iterable of Experiment instances. axes: Matplotlib Axe. dataset: Dataset Number. Must",
"gradax.set_ylabel(r'$\\mathbb{E}|g|_{2}$') elboax.set_title('{}'.format( dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None,",
"experiment.qvitraces, abscissa=abscissa,marker=qvi_ls,markevery=every, name='QVI', log_scale=False, c=TFColor.qvi, num_burnin_steps=num_burnin_steps,gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'mc'",
"r == 12: pass else: region = vor.regions[vor.point_region[r]] if not -1 in region:",
"kwargs: key word argument for the scatter plot. ''' ax = plt.subplot() if",
"log_scale is True: grads = tf.math.log(grads) loss = tf.math.log(loss) elboax.plot(x[num_burnin_steps:], - loss[num_burnin_steps:], c=c,",
"= 51 window_length_var = 51 mean = tf.reduce_mean(ts/norm, axis=-1) variance = tf.math.reduce_std(ts/norm, axis=-1)",
"gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1, gradaxalpha=1,norm=1, **kwargs): loss, timestamps, grads",
"y = tf.linspace(ymin, ymax, mesh_count) X, Y = tf.meshgrid(x, y) concatenated_mesh_coordinates = tf.transpose(tf.stack([tf.reshape(Y,",
"elboax.set_ylim(ylim) if gradylimit is not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit) if",
"norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa == 'time': xlabel = 'time(s)' elif abscissa == 'epochs':",
"dataset_name) + r'$(\\alpha=${:.0e})'.format(experiment.optimizer_params['learning_rate'])) def plot(elboax, gradax, trace, abscissa='time', name='', log_scale=False, c=None, num_burnin_steps=0, alpha=1,",
"coeff*smoothed_mean+smoothed_variance, alpha=alpha, edgecolor=edgecolor, facecolor=color) def scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed =",
"scatter_plot_voronoi(qdist,n, ax, title=''): q_samples = qdist.sample(n) speed = qdist.weights minima = min(speed) maxima",
"cm from matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import",
"if limits is not None: xlim, ylim = limits elboax.set_xlim(xlim) gradax.set_xlim(xlim) elboax.set_ylim(ylim) if",
"gradax.set_xlim(xlim) elboax.set_ylim(ylim) if gradylimit is not None: xlim, _ = limits gradax.set_xlim(xlim) gradax.set_ylim(*gradylimit)",
"if 'all' in vi_type or 'rqvi' in vi_type: plot(elboax, gradax, experiment.rqvitraces, abscissa=abscissa,marker=rqvi_ls,markevery=every, name='RQVI',",
"elif abscissa == 'epochs': x = np.arange(0, len(losses)) mean_var_ts(ts=losses[num_burnin_steps:], x=x[num_burnin_steps:], ax=elboax, label='MCVI', log_scale=log_scale,",
"log_scale=False, c=TFColor.qmc,num_burnin_steps=num_burnin_steps, gradaxalpha=.6, norm=norm,**kwargs) if 'all' in vi_type or 'rqmc' in vi_type: plot_multiple_traces(elboax,",
"array. Args: array: 2D array. The right-most index is the dimension index. ax:",
"= '#FAA43A' green = '#60BD68' pink = '#F17CB0' brown = '#B2912F' purple =",
"ymax=4.0, mesh_count=1000, name=None): plt.figure() x = tf.linspace(xmin, xmax, mesh_count) y = tf.linspace(ymin, ymax,",
"or 'mc' in vi_type: plot_multiple_traces(elboax, gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps,",
"np.arange(0, len(loss)) grads = tf.reduce_sum(tf.square(grads), axis=-1)/norm if log_scale is True: grads = tf.math.log(grads)",
"'#DECF3F' gray = '#4D4D4D' qmc = '#33a02c' rqmc = '#fdae61' qvi = '#2c7bb6'",
"gradax, experiment.mcvitraces,color=TFColor.red,marker=mc_ls,markevery=every, abscissa=abscissa, name='MC VI Multiple', log_scale=True, num_burnin_steps=num_burnin_steps, norm=norm,**kwargs) elboax.set_xticks(ticks=[]) if abscissa ==",
"timestamps - timestamps[num_burnin_steps] elif abscissa == 'epochs': x = np.arange(0, len(loss)) grads =",
"ax.plot(q_samples[:,0], q_samples[:,1], 'ko', markersize=2) ax.get_xaxis().set_visible(False) ax.get_yaxis().set_visible(False) ax.title.set_text(title) def plot_heatmap_2d(dist, xmin=-4.0, xmax=4.0, ymin=-4.0, ymax=4.0,",
"matplotlib import colors from scipy.spatial import Voronoi, voronoi_plot_2d from scipy.signal import savgol_filter from"
] |
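# Usage sketch (not part of the reconstructed module above): a minimal smoke
# test for plot(), assuming only the trace layout (loss, timestamps, grads)
# that the function unpacks; the demo data below is fabricated.
import numpy as np
import matplotlib.pyplot as plt

steps = 200
timestamps = np.cumsum(np.full(steps, 0.05))               # fake wall-clock times
loss = 50.0 / np.arange(1, steps + 1)                      # decaying loss curve
grads = np.random.default_rng(0).normal(size=(steps, 8))   # per-step gradient vectors

fig, (elboax, gradax) = plt.subplots(2, 1, sharex=True)
plot(elboax, gradax, (loss, timestamps, grads), abscissa='epochs',
     name='demo', c=TFColor.qvi)
elboax.legend()
plt.show()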
[
"url, include from keychain import keychain_client_urls from keychain import keychain_web_urls from keychain.views import",
"keychain import keychain_web_urls from keychain.views import appview app_name = 'keychain' urlpatterns = [",
"from keychain import keychain_web_urls from keychain.views import appview app_name = 'keychain' urlpatterns =",
"from keychain.views import appview app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/',",
"include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service, name='app_service'), url(r'^app/service/(?P<app_id>\\w{32})/(?P<service_id>\\w{32})/$',",
"urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'),",
"= [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$',",
"django.conf.urls import url, include from keychain import keychain_client_urls from keychain import keychain_web_urls from",
"keychain.views import appview app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)),",
"url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service, name='app_service'),",
"keychain import keychain_client_urls from keychain import keychain_web_urls from keychain.views import appview app_name =",
"<reponame>osgee/keychainserver from django.conf.urls import url, include from keychain import keychain_client_urls from keychain import",
"[ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service,",
"keychain_client_urls from keychain import keychain_web_urls from keychain.views import appview app_name = 'keychain' urlpatterns",
"include from keychain import keychain_client_urls from keychain import keychain_web_urls from keychain.views import appview",
"app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'),",
"include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service, name='app_service'), url(r'^app/service/(?P<app_id>\\w{32})/(?P<service_id>\\w{32})/$', appview.query, name='service_query'),",
"url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service, name='app_service'), url(r'^app/service/(?P<app_id>\\w{32})/(?P<service_id>\\w{32})/$', appview.query,",
"keychain_web_urls from keychain.views import appview app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)),",
"from django.conf.urls import url, include from keychain import keychain_client_urls from keychain import keychain_web_urls",
"= 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$',",
"'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup,",
"url(r'^app/$', appview.ListView.as_view(), name='app_list'), url(r'^app/signup/$', appview.signup, name='app_signup'), url(r'^app/service/(?P<app_id>\\w{32})/$', appview.service, name='app_service'), url(r'^app/service/(?P<app_id>\\w{32})/(?P<service_id>\\w{32})/$', appview.query, name='service_query'), ]",
"import keychain_web_urls from keychain.views import appview app_name = 'keychain' urlpatterns = [ url(r'^web/',",
"from keychain import keychain_client_urls from keychain import keychain_web_urls from keychain.views import appview app_name",
"appview app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$', appview.ListView.as_view(),",
"import appview app_name = 'keychain' urlpatterns = [ url(r'^web/', include(keychain_web_urls)), url(r'^client/', include(keychain_client_urls)), url(r'^app/$',",
"import url, include from keychain import keychain_client_urls from keychain import keychain_web_urls from keychain.views",
"import keychain_client_urls from keychain import keychain_web_urls from keychain.views import appview app_name = 'keychain'"
] |
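# Usage sketch (not part of the original URLconf above): how the named routes
# resolve with Django's reverse(), assuming this module is include()d from the
# project ROOT_URLCONF; the 32-character ids are placeholders, and the exact
# paths depend on the prefix the module is mounted under.
from django.urls import reverse

app_id = '0123456789abcdef0123456789abcdef'
service_id = 'abcdef0123456789abcdef0123456789'

reverse('keychain:app_list')        # e.g. '/app/' when mounted at the root
reverse('keychain:app_signup')      # e.g. '/app/signup/'
reverse('keychain:app_service', kwargs={'app_id': app_id})
reverse('keychain:service_query', kwargs={'app_id': app_id, 'service_id': service_id})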
[
"{} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\"",
"spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" ->",
"arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id",
"not res: break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data:",
"if not res: break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not",
"\"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"]",
"1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote) with connection: sub_message =",
"send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\")",
"arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id",
"if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\")",
"struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in",
"= f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] ==",
"= sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg)))",
"socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if",
"assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\"",
"sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"]",
"actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}},",
"actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] =",
"spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"]",
"= arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\"",
"= \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id);",
"\"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] =",
"spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\",",
"connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size =",
"= arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] =",
"actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then",
"= struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\"",
"raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"]",
"= msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0]",
"res: break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data: break",
"type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)",
"res)[0] data = connection.recv(size) if not data: break message = msgpack.unpackb(data, raw=False) print(message)",
"= parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen()",
"= \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg)))",
"struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args()",
"parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR,",
"spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while",
"sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"]",
"if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id,",
"function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then",
"or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}},",
"break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data: break message",
"parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET,",
"print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] =",
"sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"]",
"delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id,",
"size = struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data: break message =",
"receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\",",
"\"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message)",
"sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\",",
"sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message =",
"instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\"",
"send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"]",
"msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET,",
"then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\",",
"argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:",
"import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments =",
"message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type,",
"sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL)",
"sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\",",
"instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end",
"== \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end",
"{} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\"",
"socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept()",
"res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\", res)[0] data",
"remote = s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"]",
"= struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data: break message = msgpack.unpackb(data,",
"\"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res",
"end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message)",
"struct.unpack(\"!i\", res)[0] data = connection.recv(size) if not data: break message = msgpack.unpackb(data, raw=False)",
"\"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}});",
"= connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data,",
"20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"]",
"argparse import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments",
"\"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"]",
"= msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not",
"\"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"]",
"type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\"",
"len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size",
"-> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\",",
"print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"]",
"type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end",
"arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size =",
"socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message)",
"arguments.port)) s.listen() connection, remote = s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"]",
"\"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL)",
"instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000);",
"connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break",
"msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id",
"= sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message)",
"sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"]",
"with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] =",
"end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] ==",
"= argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as",
"s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote) with connection: sub_message = {}",
"sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message",
"spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] ==",
"connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {}",
"instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id,",
"\"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id,",
"True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\", res)[0]",
"then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\"",
"= \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data =",
"arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port))",
"msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data",
"then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000); end if(message[\"type\"] ==",
"sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg)",
"len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size)",
"== \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type,",
"= \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg =",
"sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message",
"spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"]",
"spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"]",
"5000); end if(message[\"type\"] == \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"]",
"actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}});",
"parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection,",
"= s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] =",
"parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM)",
"\"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg)",
"sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert",
"print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\",",
"message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg",
"socket import argparse import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int)",
"end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] =",
"with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote",
"socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote =",
"spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"]",
"s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\"",
"connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\", res)[0] data = connection.recv(size)",
"connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\"",
"= {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] =",
"\"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\"",
"\"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\",",
"= connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\", res)[0] data =",
"arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or",
"while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res: break size = struct.unpack(\"!i\",",
"f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\" -> \"..node_id..\".\"..actor_type..\".\"..instance_id); if(message[\"type\"] == \"init\" or message[\"type\"] == \"periodic_timer\")",
"sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg",
"= arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size",
"delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\",",
"connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False)",
"spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res =",
"import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\") arguments = parser.parse_args() with",
"connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data =",
"import socket import argparse import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\",",
"connection, remote = s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] = arguments.node_id",
"= \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4,",
"\"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data = connection.recv(4,",
"parser.add_argument(\"node_id\") arguments = parser.parse_args() with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host,",
"in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] = arguments.node_id spawn_message[\"sender_actor_type\"] = \"test_actor\" spawn_message[\"spawn_node_id\"] =",
"socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote) with connection: sub_message",
"sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] = arguments.node_id sub_msg = msgpack.packb(sub_message) connection.send(struct.pack(\"!i\", len(sub_msg))) connection.send(sub_msg) sub_message_size_data",
"= {} sub_message[\"sender_node_id\"] = arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] =",
"== \"init\") then delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"exit\"}}, 20000); end if(message[\"type\"] == \"exit\") then",
"= \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True:",
"== \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"]",
"msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if not res:",
"message[\"type\"] == \"periodic_timer\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"ping\"}}); delayed_publish({{node_id=node_id, actor_type=actor_type, instance_id=instance_id, type=\"periodic_timer\"}}, 5000);",
"s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote) with",
"import argparse import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\") parser.add_argument(\"port\", type=int) parser.add_argument(\"node_id\")",
"as s: s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote)",
"s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind((arguments.host, arguments.port)) s.listen() connection, remote = s.accept() print(remote) with connection:",
"spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function",
"spawn_msg = msgpack.packb(spawn_message) connection.send(struct.pack(\"!i\", len(spawn_msg))) connection.send(spawn_msg) while True: res = connection.recv(4, socket.MsgFlag.MSG_WAITALL) if",
"<filename>tools/fake_server.py import socket import argparse import struct import msgpack parser = argparse.ArgumentParser() parser.add_argument(\"host\")",
"if(message[\"type\"] == \"exit\") then send({{node_id=\"{arguments.node_id}\", actor_type=\"test_actor\", instance_id=\"1\", message=\"goodbye\"}}); end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"]",
"= arguments.node_id sub_message[\"sender_actor_type\"] = \"test_actor\" sub_message[\"sender_instance_id\"] = \"1\" sub_message[\"type\"] = \"subscription_update\" sub_message[\"subscription_node_id\"] =",
"connection.send(sub_msg) sub_message_size_data = connection.recv(4, socket.MsgFlag.MSG_WAITALL) sub_message_size = struct.unpack(\"!i\", sub_message_size_data)[0] sub_message_data = connection.recv(sub_message_size) sub_message",
"= connection.recv(sub_message_size) sub_message = msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message =",
"end end\"\"\" spawn_message[\"node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"instance_id\"] = \"1\" spawn_message[\"actor_type\"] = \"lua_runtime\" spawn_msg =",
"s.listen() connection, remote = s.accept() print(remote) with connection: sub_message = {} sub_message[\"sender_node_id\"] =",
"= msgpack.unpackb(sub_message_data, raw=False) print(sub_message) assert \"subscription_node_id\" in sub_message spawn_message = {} spawn_message[\"sender_node_id\"] =",
"sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] = f\"\"\" function receive(message) print(message.sender_node_id..\".\"..message.sender_actor_type..\".\"..message.sender_instance_id..\"",
"= \"test_actor\" spawn_message[\"spawn_node_id\"] = sub_message[\"subscription_node_id\"] spawn_message[\"spawn_actor_type\"] = \"remotely_spawned_actor\" spawn_message[\"spawn_instance_id\"] = arguments.node_id spawn_message[\"spawn_code\"] ="
] |
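# Client-side sketch (not part of tools/fake_server.py above): the matching
# half of the 4-byte big-endian length prefix + msgpack framing the server
# uses; send_msg/recv_msg are hypothetical helper names.
import socket
import struct

import msgpack


def send_msg(conn: socket.socket, obj) -> None:
    payload = msgpack.packb(obj)
    conn.send(struct.pack("!i", len(payload)))  # int32 length prefix, network byte order
    conn.send(payload)


def recv_msg(conn: socket.socket):
    header = conn.recv(4, socket.MsgFlag.MSG_WAITALL)
    if not header:
        return None                             # peer closed the connection
    size = struct.unpack("!i", header)[0]
    body = conn.recv(size, socket.MsgFlag.MSG_WAITALL)
    return msgpack.unpackb(body, raw=False)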
[
"dummy.arg6 # Expected result for arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2':",
"'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ]",
"dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4",
"dummy.arg1 # No string in subtree assert transformed.arg2 == None assert transformed.arg2 is",
"assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def",
"dummy.arg5 # No string in subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6",
"base = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [],",
"def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node transformed =",
"else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is",
"in subtree assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)},",
"is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'):",
"StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1',",
"+ 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert",
"not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS def visit_DummyModel(self,",
"= [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy =",
"= [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base,",
"'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of times",
"the dummy class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert",
"{'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No string in subtree assert",
"== {'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected result for arg7 and",
"node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0] is not",
"from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str",
"is not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO",
"def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node):",
"DummyCounter(Visitor): '''Simply counts the number of times the dummy class is encountered''' def",
"for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class",
"def visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v',",
"'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} )",
"return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0]",
"'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No string in subtree assert transformed.arg4",
"'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of times the",
"== None assert transformed.arg2 is dummy.arg2 # String in subtree assert transformed.arg3 ==",
"arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3 =",
"'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k':",
"Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self,",
"# String in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1",
"return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2',",
"subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected result",
"assert control.arg7 is not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache",
"# String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not",
"StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2',",
"visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1',",
"DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 =",
"is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5 ==",
"is dummy.arg6 # Expected result for arg7 and arg8 basedict = {'arg1': 'prefix_arg1',",
"of times the dummy class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) +",
"assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def",
"Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1:",
"= [base, base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def",
"def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else:",
"# No string in subtree assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4,",
"dummy = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [],",
"{'arg6_k': None} ) dummy = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'],",
"hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy)",
"arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7:",
"is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node):",
"[base, base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy):",
"pytest from align.schema.types import BaseModel, Optional, List, Dict from align.schema.visitor import Visitor, Transformer,",
"return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1]",
"dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str,",
"# Expected result for arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None,",
"arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5':",
"'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7",
"= 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'},",
"not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() control =",
"== 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert",
"'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None},",
"dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7",
"= {'arg6_k': None} ) dummy = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1',",
"self.top = node return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7",
"== [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2',",
"'top'): self.top = node return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert",
"DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6:",
"Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node assert",
"DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2",
"None} assert transformed.arg6 is dummy.arg6 # Expected result for arg7 and arg8 basedict",
"counts the number of times the dummy class is encountered''' def visit_DummyModel(self, node):",
"'arg5_v'}, arg6 = {'arg6_k': None} ) dummy = DummyModel( arg1 = 'arg1', arg3",
"assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert",
"{'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base, arg8 = [base, base]",
"is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is",
"working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy):",
"= ['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k':",
"String in subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not dummy.arg8",
"transformed.arg3 is not dummy.arg3 # No string in subtree assert transformed.arg4 == []",
"] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of times the dummy",
"# String in subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not",
"arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 =",
"basedict] assert transformed.arg8 is not dummy.arg8 # Ensure cache is working for generic_visitor",
"assert transformed.arg3 is not dummy.arg3 # No string in subtree assert transformed.arg4 ==",
"'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None,",
"not dummy.arg8 # Ensure cache is working for generic_visitor assert transformed.arg7 is transformed.arg8[0]",
"Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel(",
"the number of times the dummy class is encountered''' def visit_DummyModel(self, node): return",
"'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy):",
"dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6",
"not dummy.arg1 # No string in subtree assert transformed.arg2 == None assert transformed.arg2",
"from align.schema.types import BaseModel, Optional, List, Dict from align.schema.visitor import Visitor, Transformer, cache",
"encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def",
"if not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() control",
"assert transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7 # String in subtree",
"None assert transformed.arg2 is dummy.arg2 # String in subtree assert transformed.arg3 == ['prefix_arg3_1',",
"is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is",
"isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is",
"in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 #",
"Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5)",
"node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1",
"control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR",
"1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2)",
"No string in subtree assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)},",
"\"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4",
"Expected result for arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3':",
"is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4",
"is not dummy.arg8 # Ensure cache is working for generic_visitor assert transformed.arg7 is",
"'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6",
"arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str,",
"= {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'},",
"import BaseModel, Optional, List, Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def",
"== [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy)",
"FOR MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node",
"arg8 = [base, base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == []",
"+ node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree assert",
"'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2',",
"dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node transformed",
"transformed.arg2 is dummy.arg2 # String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert",
"Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7)",
"'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} # String in subtree assert",
"assert transformed.arg6 is dummy.arg6 # Expected result for arg7 and arg8 basedict =",
"{'arg6_k': None}, arg7 = base, arg8 = [base, base] ) return dummy def",
"arg6 = {'arg6_k': None}, arg7 = base, arg8 = [base, base] ) return",
"assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No string",
"{transformed.arg4})' # String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is",
"# String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not",
"assert isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1",
"'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1',",
"return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0] is",
"control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS def visit_DummyModel(self, node):",
"is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node):",
"String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5",
"assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No string",
"is not dummy.arg3 # No string in subtree assert transformed.arg4 == [] assert",
"MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return",
"dummy.arg3 # No string in subtree assert transformed.arg4 == [] assert transformed.arg4 is",
"in subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected",
"assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected result for",
"# String in subtree assert transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7",
"not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() transformed =",
"dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8",
"class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert",
"def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5:",
"assert transformed.arg7 is not dummy.arg7 # String in subtree assert transformed.arg8 == [basedict,",
"CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS def visit_DummyModel(self, node): if not",
"{'arg6_k': None}, 'arg7': None, 'arg8': None} # String in subtree assert transformed.arg7 ==",
"= node return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is",
"assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' #",
"None}, arg7 = base, arg8 = [base, base] ) return dummy def test_visitor_no_output(dummy):",
"AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed,",
"'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def",
"None, 'arg8': None} # String in subtree assert transformed.arg7 == basedict assert transformed.arg7",
"# No string in subtree assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2",
"'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} # String in subtree assert transformed.arg7",
"assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not",
"[basedict, basedict] assert transformed.arg8 is not dummy.arg8 # Ensure cache is working for",
"'arg8': None} # String in subtree assert transformed.arg7 == basedict assert transformed.arg7 is",
"test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of times the dummy class is",
"assert transformed.arg5 is not dummy.arg5 # No string in subtree assert transformed.arg6 ==",
"= base, arg8 = [base, base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy)",
"arg6 = {'arg6_k': None} ) dummy = DummyModel( arg1 = 'arg1', arg3 =",
"'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} # String in",
"and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [],",
"transformed.arg5 is not dummy.arg5 # No string in subtree assert transformed.arg6 == {'arg6_k':",
"None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7':",
"List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\"",
"class DummyCounter(Visitor): '''Simply counts the number of times the dummy class is encountered'''",
"== 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No string in subtree assert",
"assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1',",
"else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0]",
"def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy):",
"assert transformed.arg2 is dummy.arg2 # String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2']",
"# Ensure cache is working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0]",
"arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs()",
"None} ) dummy = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4",
"is not dummy.arg7 # String in subtree assert transformed.arg8 == [basedict, basedict] assert",
"No string in subtree assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2 #",
"[] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree",
"string in subtree assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}),",
"['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No string in subtree assert",
"transformed.arg7 is not dummy.arg7 # String in subtree assert transformed.arg8 == [basedict, basedict]",
"List, Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel):",
"transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5",
"assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert",
"transformed.arg6 is dummy.arg6 # Expected result for arg7 and arg8 basedict = {'arg1':",
"def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [",
"transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String",
"self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert",
"return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in",
"transformed.arg8 is not dummy.arg8 # Ensure cache is working for generic_visitor assert transformed.arg7",
"# No string in subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is",
"'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree",
"Optional, List, Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy(): class",
"string in subtree assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2 # String",
"{'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected result for arg7 and arg8",
"== basedict assert transformed.arg7 is not dummy.arg7 # String in subtree assert transformed.arg8",
"import pytest from align.schema.types import BaseModel, Optional, List, Dict from align.schema.visitor import Visitor,",
"assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not dummy.arg8 # Ensure cache",
"4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3)",
"dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5",
"visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return",
"'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1',",
"node return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is transformed.arg8[0]",
"'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of",
"assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert",
"new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5",
"class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return",
"control.arg7 is not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache #",
"test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy)",
"arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy",
"transformed.arg2 == None assert transformed.arg2 is dummy.arg2 # String in subtree assert transformed.arg3",
"{'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6':",
"class StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1',",
"No string in subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6",
"assert transformed.arg8 is not dummy.arg8 # Ensure cache is working for generic_visitor assert",
"[], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy = DummyModel(",
"assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No string in",
") dummy = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 =",
"'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} #",
"{'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} # String in subtree",
"String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3",
"'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No string in subtree assert transformed.arg2",
"transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if",
"return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is",
"arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4':",
"= DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5",
"node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String",
"Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' +",
"not dummy.arg7 # String in subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8",
"assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is",
"== {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No string in subtree",
"'top'): self.top = node return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert",
"arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8:",
"{'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy = DummyModel( arg1 = 'arg1',",
"Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy)",
"test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy) == [ 'arg1',",
"is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class",
"transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top",
"Ensure cache is working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is",
"subtree assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2 # String in subtree",
"base, arg8 = [base, base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) ==",
"transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No string in",
"@pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]]",
"result for arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1',",
"basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k':",
"test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return",
"class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) ==",
"not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS",
"for arg7 and arg8 basedict = {'arg1': 'prefix_arg1', 'arg2': None, 'arg3': ['prefix_arg3_1', 'prefix_arg3_2'],",
"control = UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0] is not control.arg8[1]",
"transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No string in subtree",
"is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS def",
"['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None},",
"Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3:",
"self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0]",
"List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base",
"not dummy.arg5 # No string in subtree assert transformed.arg6 == {'arg6_k': None} assert",
"is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is",
"'arg7': None, 'arg8': None} # String in subtree assert transformed.arg7 == basedict assert",
"def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is",
"dummy.arg2 # String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is",
"[] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node assert StrValVisitor().visit(dummy) ==",
"== [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in",
"assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert",
"node): if not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy()",
"assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert",
"{dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert",
"'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number",
"node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v',",
"dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self,",
"assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert",
"Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy):",
"None}, 'arg7': None, 'arg8': None} # String in subtree assert transformed.arg7 == basedict",
"= {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base, arg8 = [base,",
"transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1 ==",
"'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the",
"subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No",
"arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base, arg8 =",
"node return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is not",
"assert transformed.arg1 is not dummy.arg1 # No string in subtree assert transformed.arg2 ==",
"VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return self.generic_visit(node)",
"subtree assert transformed.arg4 == [] assert transformed.arg4 is dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})'",
"Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4)",
"String in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 #",
"dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer):",
"'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply",
"arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k':",
"align.schema.types import BaseModel, Optional, List, Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture",
"AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1 == 'prefix_arg1' assert",
"\"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1',",
"Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3",
"dummy class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy)",
"transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6 # Expected result for arg7",
"assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_'",
"transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No string in",
"'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None}",
"is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is",
"DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5 =",
") return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor):",
"is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is",
"visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert",
"assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR MOST",
"['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}",
"def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts the number of times the dummy class",
"str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]]",
"dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy",
"THIS FOR MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top =",
"node): return sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1)",
"is not dummy.arg5 # No string in subtree assert transformed.arg6 == {'arg6_k': None}",
"[], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base, arg8",
"basedict assert transformed.arg7 is not dummy.arg7 # String in subtree assert transformed.arg8 ==",
"if not hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() transformed",
"Transformer().visit(dummy.arg3) is dummy.arg3 assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6)",
"transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self,",
"arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base =",
"in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No",
"is dummy.arg2 # String in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3",
"== [basedict, basedict] assert transformed.arg8 is not dummy.arg8 # Ensure cache is working",
"test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1 assert Transformer().visit(dummy.arg2) is dummy.arg2 assert Transformer().visit(dummy.arg3) is dummy.arg3",
"arg7 = base, arg8 = [base, base] ) return dummy def test_visitor_no_output(dummy): assert",
"UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node return self.generic_visit(node)",
"= UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class",
"['prefix_arg3_1', 'prefix_arg3_2'], 'arg4': [], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8':",
"align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2:",
"node): return node assert StrValVisitor().visit(dummy) == [ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1',",
"arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None}, arg7 =",
"'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor):",
"assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def visit_str(self, node): return node",
"self.top = node return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7",
"= node return self.generic_visit(node) else: return node.copy() control = UncachedTransformer().visit(dummy) assert control.arg7 is",
"return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy) assert transformed.arg7 is transformed.arg8[0] assert",
"DO THIS FOR MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top",
"is not dummy.arg1 # No string in subtree assert transformed.arg2 == None assert",
"'arg5_v'}, arg6 = {'arg6_k': None}, arg7 = base, arg8 = [base, base] )",
"'''Simply counts the number of times the dummy class is encountered''' def visit_DummyModel(self,",
"arg3 = ['arg3_1', 'arg3_2'], arg4 = [], arg5 = {'arg5_k': 'arg5_v'}, arg6 =",
"subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No",
"not dummy.arg3 # No string in subtree assert transformed.arg4 == [] assert transformed.arg4",
"f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5 == {'arg5_k': 'prefix_arg5_v'}",
"dummy.arg8 # Ensure cache is working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert",
"'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class",
"cache is working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1]",
"test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top = node",
"arg5 = {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy = DummyModel( arg1",
"String in subtree assert transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7 #",
"base] ) return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class",
"Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\"",
"== ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 # No string in subtree",
"subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not dummy.arg1 # No string",
"@cache # DO THIS FOR MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self,",
"is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return 'prefix_' + node",
"in subtree assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2 # String in",
"dummy.arg4, f'old:({id(dummy.arg4)}, {dummy.arg4}), new:({id(transformed.arg4)}, {transformed.arg4})' # String in subtree assert transformed.arg5 == {'arg5_k':",
"cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4:",
"subtree assert transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7 # String in",
"'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', ] def test_visitor_processed_output(dummy): class DummyCounter(Visitor): '''Simply counts",
"transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not dummy.arg8 # Ensure cache is",
"def visit_str(self, node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__)",
"Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str]",
"assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is dummy.arg8 assert Transformer().visit(dummy) is dummy def",
"= AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) # String in subtree assert transformed.arg1 == 'prefix_arg1'",
"in subtree assert transformed.arg3 == ['prefix_arg3_1', 'prefix_arg3_2'] assert transformed.arg3 is not dummy.arg3 #",
"sum(self.generic_visit(node)) + 1 assert DummyCounter().visit(dummy) == 4 def test_transformer_no_visitor(dummy): assert Transformer().visit(dummy.arg1) is dummy.arg1",
"dummy.arg8 assert Transformer().visit(dummy) is dummy def test_transformer_string_visitor(dummy): class AddStringPrefix(Transformer): def visit_str(self, node): return",
"'prefix_arg5_v'} assert transformed.arg5 is not dummy.arg5 # No string in subtree assert transformed.arg6",
"in subtree assert transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7 # String",
"'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v',",
"class DummyModel(BaseModel): arg1: str arg2: Optional[str] arg3: List[str] arg4: List[Optional[str]] arg5: Dict[str, str]",
"is working for generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def",
"None} # String in subtree assert transformed.arg7 == basedict assert transformed.arg7 is not",
"= {'arg6_k': None}, arg7 = base, arg8 = [base, base] ) return dummy",
"Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8)",
"is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert Transformer().visit(dummy.arg7) is dummy.arg7 assert Transformer().visit(dummy.arg8) is",
"transformed.arg1 is not dummy.arg1 # No string in subtree assert transformed.arg2 == None",
"number of times the dummy class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node))",
"import Visitor, Transformer, cache @pytest.fixture def dummy(): class DummyModel(BaseModel): arg1: str arg2: Optional[str]",
"Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1',",
"assert Transformer().visit(dummy.arg4) is dummy.arg4 assert Transformer().visit(dummy.arg5) is dummy.arg5 assert Transformer().visit(dummy.arg6) is dummy.arg6 assert",
"visit_str(self, node): return 'prefix_' + node transformed = AddStringPrefix().visit(dummy) assert isinstance(transformed, dummy.__class__) #",
"times the dummy class is encountered''' def visit_DummyModel(self, node): return sum(self.generic_visit(node)) + 1",
"dummy.__class__) # String in subtree assert transformed.arg1 == 'prefix_arg1' assert transformed.arg1 is not",
"= {'arg5_k': 'arg5_v'}, arg6 = {'arg6_k': None} ) dummy = DummyModel( arg1 =",
"dummy.arg7 # String in subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is",
"control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS",
"subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not dummy.arg8 # Ensure",
"hasattr(self, 'top'): self.top = node return self.generic_visit(node) else: return node.copy() transformed = CachedTransformer().visit(dummy)",
"assert transformed.arg2 == None assert transformed.arg2 is dummy.arg2 # String in subtree assert",
"def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self, 'top'): self.top =",
"UncachedTransformer().visit(dummy) assert control.arg7 is not control.arg8[0] assert control.arg8[0] is not control.arg8[1] class CachedTransformer(Transformer):",
"[], 'arg5': {'arg5_k': 'prefix_arg5_v'}, 'arg6': {'arg6_k': None}, 'arg7': None, 'arg8': None} # String",
"BaseModel, Optional, List, Dict from align.schema.visitor import Visitor, Transformer, cache @pytest.fixture def dummy():",
"return dummy def test_visitor_no_output(dummy): assert Visitor().visit(dummy) == [] def test_visitor_raw_output(dummy): class StrValVisitor(Visitor): def",
"string in subtree assert transformed.arg6 == {'arg6_k': None} assert transformed.arg6 is dummy.arg6 #",
"[ 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v', 'arg1', 'arg3_1', 'arg3_2', 'arg5_v',",
"transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer): def visit_DummyModel(self, node): if not hasattr(self,",
"str] arg6: Dict[str, Optional[str]] arg7: \"Optional[DummyModel]\" arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1",
"class CachedTransformer(Transformer): @cache # DO THIS FOR MOST VISITORS def visit_DummyModel(self, node): if",
"in subtree assert transformed.arg8 == [basedict, basedict] assert transformed.arg8 is not dummy.arg8 #",
"# DO THIS FOR MOST VISITORS def visit_DummyModel(self, node): if not hasattr(self, 'top'):",
"arg8: \"Optional[List[DummyModel]]\" DummyModel.update_forward_refs() base = DummyModel( arg1 = 'arg1', arg3 = ['arg3_1', 'arg3_2'],",
"transformed.arg7 == basedict assert transformed.arg7 is not dummy.arg7 # String in subtree assert",
"generic_visitor assert transformed.arg7 is transformed.arg8[0] assert transformed.arg8[0] is transformed.arg8[1] def test_cache(dummy): class UncachedTransformer(Transformer):"
] |
[
"elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train)",
"file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words = [] # Process all",
"+ elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \"",
"# Some constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other",
"self.vector_to_tag_map = {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence",
"w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map",
"= [] # Process all lines in the file for line in f_data:",
"for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create",
"line in f_train: line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train",
"(self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training",
"dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags",
"print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT = \" +",
"create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list =",
"lines in the file for line in f_train: line = line.strip() if not",
"pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"]",
"raw_tags_train = [] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not",
"stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map = {}",
"words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags",
"tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w",
"num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) +",
"= line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \" + str(len(data_set))) self.n_sentences_all",
"len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ##################################################",
"the W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb')",
"word_seq[ix] t = tag_seq[ix] w = w.lower() if w in self.word_to_ix_map : count",
"for line in f_data: text = line.strip() #break the input text into sentences",
"word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)): w = word_seq[ix] w =",
"for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words)",
"= 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs = None",
"with open(train_file, 'r') as f_train: self.tag_to_vector_map = {} # For storing one hot",
"pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK",
"word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix",
"= {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero",
"count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w)",
"len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \"",
"unk_words = [] count = 0 for word_seq in all_sentences_words: if len(word_seq) >",
"= np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT =",
"self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## #",
"= {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data =",
"pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"]",
"tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag]",
"= {} self.vector_to_tag_map = {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ##################################################",
"before tokenization sentences = sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append(",
"= tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w =",
"return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources",
"return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file):",
"def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\") # list of list",
"= word_seq[ix] w = w.lower() if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"= self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data",
"print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content =",
"sentence_words = [] sentence_tags = [] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag)",
"in raw_data_train: elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w =",
"a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] =",
"the file for line in f_test: line = line.strip() if not line: data_set.append(",
"{} self.vector_to_tag_map = {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## #",
"= pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file):",
"elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries to make all the sentences",
"unk_words = [] count = 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags",
"################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs in pred_seq: class_vec",
"pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table",
"Table for words and their word vectors### print(\"Creating the lookup table\") for index,",
"tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32)",
"cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes =",
"# load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the",
"from dataframe\") # list of list of tokens all_sentences_words = [] # Process",
"get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\") # list",
"words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for",
"if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map :",
"with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs",
"same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train -",
"= self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X])",
"tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find the",
"else: unk_words.append(w) w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 #",
"of tokens all_sentences_words = [] # Process all lines in the file for",
"np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ##################################################",
"# Other stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map",
"file for line in f_train: line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train),",
"################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file,",
"self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero vector for",
"# create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list",
"data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words",
"w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map :",
"((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test)",
"elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix] w",
"\" + str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_data, word_seq_list,",
"from nltk.tokenize import sent_tokenize import _pickle as cPickle class DataReader: def __init__ (self,",
"self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0]",
"= 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not (input_resources_pickle_file is None):",
"self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find the maximum",
"continue if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in",
"word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] =",
"f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features =",
"table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs)",
"= np.array(self.wordvecs) print(\"Number of entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features =",
"of entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size",
"w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 # sample from",
"#Create TEST feature vectors all_X_test, all_Y_test = [], [] num_tokens_list = [] unk_words",
"[] num_tokens_list = [] unk_words = [] count = 0 for word_seq, tag_seq",
"pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map",
"into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs",
"np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) +",
"from file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set = [] sentence_words =",
"in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long",
"self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not (input_resources_pickle_file is",
"hot vector notation raw_data_train = [] raw_words_train = [] raw_tags_train = [] #",
"index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of",
"None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec)",
"pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"]",
"= \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences",
"in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of",
"self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries",
"the maximum sequence length for Training data self.max_sentence_len_train = 0 for seq in",
"Some constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff",
"in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file",
"################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content =",
"= 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find the maximum sequence",
"= [] sentence_tags = [] # Process all lines in the file for",
"self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT",
"print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file,",
"word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in",
"# Pad the sequences for missing entries to make all the sentences the",
"= [] count = 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags =",
"print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the",
"= \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 # sample from normal",
"self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map",
"print(\"Loading test data from file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set =",
"from file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words = [] # Process",
"lines in the file for line in f_data: text = line.strip() #break the",
"self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list =",
"_pickle as cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None): # Some constants",
"text into sentences before tokenization sentences = sent_tokenize(text) for sent in sentences: sentence_words",
"t = tag_seq[ix] w = w.lower() if w in self.word_to_ix_map : count +=",
"__init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes = 0 self.num_embedding_features = 0",
"= self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] =",
"num_tokens_list = [] unk_words = [] count = 0 for word_seq in all_sentences_words:",
"to make all the sentences the same length nil_X = self.zero_vec_pos nil_Y =",
"sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all =",
"from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model",
"print(\"skip the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs",
"\"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] =",
"ix in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w = w.lower() if",
"sentences = sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) )",
"elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix] t =",
"in the file for line in data_list: text = line.strip() #break the input",
"print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data from",
"Process all lines in the file for line in f_test: line = line.strip()",
"self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ##################################################",
"Model### print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with",
"all_X_test, all_Y_test = [], [] num_tokens_list = [] unk_words = [] count =",
"if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of",
"- len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test)",
"#break the input text into sentences before tokenization sentences = sent_tokenize(text) for sent",
"for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], [] for ix in",
"(all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading",
"tag_seq[ix] #ignore the word if it has uncovered ground truth entity type if",
"elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix] t",
"unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1",
"print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self,",
"# get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\") #",
"= 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## #",
"for Training data self.max_sentence_len_train = 0 for seq in raw_data_train: if len(seq[0]) >",
"truth entity type if not (t in self.tag_to_vector_map): continue if w in self.word_to_ix_map:",
"in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long",
"Training data self.max_sentence_len_train = 0 for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train:",
"tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue word, tag = line.split('\\t') sentence_words.append(word)",
"normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w]",
"{} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes",
"as f_test: data_set = [] sentence_words = [] sentence_tags = [] # Process",
"for line in f_test: line = line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags)))",
"################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file,",
"###Create LookUp Table for words and their word vectors### print(\"Creating the lookup table\")",
"{}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled",
"## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data from",
"[] unk_words = [] count = 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs,",
"= [] num_tokens_list = [] unk_words = [] count = 0 for word_seq",
"= line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes",
"# Find the maximum sequence length for Training data self.max_sentence_len_train = 0 for",
"get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\") # list of list of",
"same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X])",
"the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train",
"Tag self.vector_to_tag_map = {} self.num_classes = 0 # Used to put 1 in",
"print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words)))",
"word_seq[ix] w = w.lower() if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif",
"self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the lookup table = {}\".format(len(self.wordvecs)))",
"resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] =",
"elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w",
"range(len(word_seq)): w = word_seq[ix] w = w.lower() if w in self.word_to_ix_map: count +=",
"the sequences for missing entries to make them all the same length nil_X",
"WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count))",
"raw_words_train = [] raw_tags_train = [] # Process all lines in the file",
"type if not (t in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count +=",
"= [] ###Create LookUp Table for words and their word vectors### print(\"Creating the",
"= self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] =",
"# read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data from file {}\".format(test_file))",
"in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features))",
"Used to put 1 in the one hot vector notation raw_data_train = []",
"text = line.strip() #break the input text into sentences before tokenization sentences =",
"model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f:",
"len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test",
"pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table",
"print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero vector for the Paddings self.wordvecs",
"all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the",
"w = word_seq[ix] t = tag_seq[ix] w = w.lower() if w in self.word_to_ix_map",
"elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t])",
"np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1",
"1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries to make them all",
"= [] count = 0 for word_seq, tag_seq in data_set: if len(word_seq) >",
"+= 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else:",
"the file for line in f_train: line = line.strip() if not line: raw_data_train.append(",
"= tag_seq[ix] #ignore the word if it has uncovered ground truth entity type",
"len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ##################################################",
"############## Create Train Vectors################ all_X_train, all_Y_train = [], [] unk_words = [] count",
"= pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## #",
"{}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = []",
"= self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ##################################################",
"[] # Process all lines in the file for line in data_list: text",
"self.tag_to_vector_map = {} # For storing one hot vector notation for each Tag",
"data from file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words = [] #",
"elem_wordvecs = [] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower()",
"of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self,",
"nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X])",
"print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading",
"word vectors### print(\"Creating the lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] =",
"in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################",
"sequences for missing entries to make all the sentences the same length nil_X",
"else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) #",
"= len(raw_data_train) # Find the maximum sequence length for Training data self.max_sentence_len_train =",
"(self, data_list): print(\"Reading unlabeled data from dataframe\") # list of list of tokens",
"pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"]",
"= pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map =",
"self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the",
"= len(data_set) #Create TEST feature vectors all_X_test, all_Y_test = [], [] num_tokens_list =",
"\"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for",
"for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1",
"lines in the file for line in data_list: text = line.strip() #break the",
"all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test =",
"= np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ##",
"tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None",
"[] count = 0 for word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train:",
"np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences",
"((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT =",
"the resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"]",
"the lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs",
"print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words)))",
"# Process all lines in the file for line in f_test: line =",
"sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] =",
"line in f_data: text = line.strip() #break the input text into sentences before",
"pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"]",
"self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def",
"# load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file))",
"1 in the one hot vector notation raw_data_train = [] raw_words_train = []",
"if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map",
"= len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train = [], [] unk_words =",
"f_data: all_sentences_words = [] # Process all lines in the file for line",
"w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w)",
"################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file",
"pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data",
"word_seq_list = [] num_tokens_list = [] unk_words = [] count = 0 for",
"all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \" +",
"in range(len(word_seq)): w = word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore the",
"COUNT = \" + str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words))) print(\"Done\") return",
"= {} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not",
"[] for ix in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w =",
"is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags",
"in the one hot vector notation raw_data_train = [] raw_words_train = [] raw_tags_train",
"# save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file))",
"= np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found",
"in f_test: line = line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words =",
"word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)): w =",
"range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w = w.lower() if w in",
"make them all the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length",
"in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec)",
"list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp Table for words and",
"print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## #",
"= np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the",
"line in data_list: text = line.strip() #break the input text into sentences before",
"cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading",
"entries to make all the sentences the same length nil_X = self.zero_vec_pos nil_Y",
"#W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes')",
"print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the",
"= 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs = None self.word_to_ix_map = {}",
"self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def",
"examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags):",
"sequences for missing entries to make them all the same length nil_X =",
"= self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file,",
"> self.max_sentence_len_train: print(\"skip the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train]",
"word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)):",
"print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list)",
"sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples",
"as np import nltk from nltk.tokenize import sent_tokenize import _pickle as cPickle class",
"1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w)",
"= \" + str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return",
"= [] word_seq_list = [] num_tokens_list = [] unk_words = [] count =",
"################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file",
"np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /=",
"DataReader: def __init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes = 0 self.num_embedding_features",
"raw_data_train = [] raw_words_train = [] raw_tags_train = [] # Process all lines",
"has uncovered ground truth entity type if not (t in self.tag_to_vector_map): continue if",
"[] unk_words = [] count = 0 for word_seq in all_sentences_words: if len(word_seq)",
"new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for",
"data from dataframe\") # list of list of tokens all_sentences_words = [] #",
"[] sentence_words = [] sentence_tags = [] # Process all lines in the",
"all lines in the file for line in f_train: line = line.strip() if",
"[] # Process all lines in the file for line in f_train: line",
"self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map",
"= [] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples",
"elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \" +",
"= pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def",
"################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading",
"self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries to make",
"= 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all =",
"for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist())",
"test examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors",
"data_set = [] sentence_words = [] sentence_tags = [] # Process all lines",
"Pad the sequences for missing entries to make all the sentences the same",
"all_sentences_words = [] # Process all lines in the file for line in",
"for words and their word vectors### print(\"Creating the lookup table\") for index, word",
"hot vector notation for each Tag self.vector_to_tag_map = {} self.num_classes = 0 #",
"= self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] =",
"file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model =",
"word if it has uncovered ground truth entity type if not (t in",
"= pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec",
"norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0]",
"numpy as np import nltk from nltk.tokenize import sent_tokenize import _pickle as cPickle",
"entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size =",
"/= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t]))",
"= None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map =",
"pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file):",
"COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\")",
"raw_tags_train = [] # Process all lines in the file for line in",
"file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map =",
"# Used to put 1 in the one hot vector notation raw_data_train =",
"sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set)",
"the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) #",
"word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries",
"1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries to make all",
"all lines in the file for line in data_list: text = line.strip() #break",
"self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test =",
"examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data",
"self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w)",
"the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append(",
"storing one hot vector notation for each Tag self.vector_to_tag_map = {} self.num_classes =",
"self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file)",
"length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs)",
"((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words)))",
"# Process all lines in the file for line in f_train: line =",
"the file for line in data_list: text = line.strip() #break the input text",
"(input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = [] for",
"line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \" + str(len(data_set))) self.n_sentences_all =",
"to put 1 in the one hot vector notation raw_data_train = [] raw_words_train",
"word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the",
"raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train,",
"Create Train Vectors################ all_X_train, all_Y_train = [], [] unk_words = [] count =",
"+ str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test, all_Y_test = [],",
"= sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all",
"nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs)",
"for missing entries to make them all the same length nil_X = self.zero_vec_pos",
"return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data",
"= line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train =",
"the one hot vector notation raw_data_train = [] raw_words_train = [] raw_tags_train =",
"print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def",
"entity type if not (t in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count",
"#Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE']",
"self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train",
"if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long sentence\") word_seq",
"w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the",
"print(\"Number of entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding",
"nltk.tokenize import sent_tokenize import _pickle as cPickle class DataReader: def __init__ (self, input_resources_pickle_file",
"tokens all_sentences_words = [] # Process all lines in the file for line",
"pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\")",
"= [] sentence_tags = [] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number",
"elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix] w =",
"for missing entries to make all the sentences the same length nil_X =",
"1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w =",
"Find the maximum sequence length for Training data self.max_sentence_len_train = 0 for seq",
"open(train_file, 'r') as f_train: self.tag_to_vector_map = {} # For storing one hot vector",
"all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT",
"unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features)",
"= cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes",
"self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of training examples = \"",
"not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue word,",
"constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs",
"+= 1 print(\"number of training examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys()))",
"np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \"",
"\" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT=",
"unk_words.append(w) w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 # sample",
"print(\"number of training examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id,",
"count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t])",
"Other stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map =",
"= \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test, all_Y_test",
"list of tokens all_sentences_words = [] # Process all lines in the file",
"+ str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_data, word_seq_list, num_tokens_list)",
"= np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] -",
"= np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y])",
"the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"]",
"for ix in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w = w.lower()",
"output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] =",
"all lines in the file for line in f_test: line = line.strip() if",
"elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries to make them all the",
"self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) #",
"[] num_tokens_list = [] unk_words = [] count = 0 for word_seq in",
"the file for line in f_data: text = line.strip() #break the input text",
"input text into sentences before tokenization sentences = sent_tokenize(text) for sent in sentences:",
"COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ##################################################",
"self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\"))",
"if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence",
"(tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue word, tag = line.split('\\t')",
"training data from file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map = {}",
"tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the",
"Vectors################ all_X_train, all_Y_train = [], [] unk_words = [] count = 0 for",
"the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train -",
"train_file, output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file)) with open(train_file, 'r') as",
"line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes +=",
"self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing",
"read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data from file",
"* np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv",
"else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading",
"elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w]",
"WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words))) print(\"Done\")",
"tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def",
"vectors### print(\"Creating the lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] = index",
"raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1",
"= np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append(",
"make all the sentences the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE'])",
"dataframe\") # list of list of tokens all_sentences_words = [] # Process all",
"len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long sentence\") word_seq =",
"= {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] =",
"in the file for line in f_test: line = line.strip() if not line:",
"to make them all the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE'])",
"with open(test_file, 'r') as f_test: data_set = [] sentence_words = [] sentence_tags =",
"pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"]",
"- len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT =",
"\"rb\")) with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys())",
"ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() if w in self.word_to_ix_map:",
"line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train",
"elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries to make them all the",
"one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag",
"for line in data_list: text = line.strip() #break the input text into sentences",
"line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue word, tag",
"sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for",
"ground truth entity type if not (t in self.tag_to_vector_map): continue if w in",
"encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp Table",
"not (t in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train",
"pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content,",
"in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] =",
"self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv = 2",
"with open(data_file, 'r') as f_data: all_sentences_words = [] # Process all lines in",
"f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs =",
"= 0 # Used to put 1 in the one hot vector notation",
"Word2Vec Model### print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\"))",
"all_X_train, all_Y_train = [], [] unk_words = [] count = 0 for word_seq,",
"nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all))",
"self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features",
"\"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing",
"[] sentence_tags = [] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of",
"[] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if",
"[], [] num_tokens_list = [] unk_words = [] count = 0 for word_seq,",
"[], [] for ix in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w",
"WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file)",
"length for Training data self.max_sentence_len_train = 0 for seq in raw_data_train: if len(seq[0])",
"= len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1",
"all_X_data = [] word_seq_list = [] num_tokens_list = [] unk_words = [] count",
"= np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE'",
"import sent_tokenize import _pickle as cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None):",
"for word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words",
"{}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"]",
"if not (t in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count += 1",
"str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS = \" +",
"self.max_sentence_len_train =0 # Other stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all =",
"(self, test_file): print(\"Loading test data from file {}\".format(test_file)) with open(test_file, 'r') as f_test:",
"training examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in",
"################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list = []",
"get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file, 'r') as",
"{} self.wordvecs = [] ###Create LookUp Table for words and their word vectors###",
"one hot vector notation raw_data_train = [] raw_words_train = [] raw_tags_train = []",
"= word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore the word if it",
"if w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in",
"# Pad the sequences for missing entries to make them all the same",
"print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map",
"= self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test",
"= np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist()))",
"sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number",
"0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs = None self.word_to_ix_map",
"= np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found",
"= cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create",
"from keras.preprocessing import sequence import numpy as np import nltk from nltk.tokenize import",
"\" + str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train,",
"# Process all lines in the file for line in data_list: text =",
"import numpy as np import nltk from nltk.tokenize import sent_tokenize import _pickle as",
"None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags =",
"examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test,",
"in f_train: line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train =",
"= {} # For storing one hot vector notation for each Tag self.vector_to_tag_map",
"self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos",
"the input text into sentences before tokenization sentences = sent_tokenize(text) for sent in",
"= [] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in",
"- 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries to make them",
"raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue word, tag =",
"{} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def",
"load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V model from file",
"test data from file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set = []",
"(tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue word, tag = line.split('\\t')",
"it has uncovered ground truth entity type if not (t in self.tag_to_vector_map): continue",
"entries to make them all the same length nil_X = self.zero_vec_pos nil_Y =",
"################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs",
"all_Y_test = [], [] num_tokens_list = [] unk_words = [] count = 0",
"0 # Used to put 1 in the one hot vector notation raw_data_train",
"self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos",
"tokenization sentences = sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words)",
"elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \" +",
"################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data from file {}\".format(test_file)) with open(test_file,",
"class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in",
"t = tag_seq[ix] #ignore the word if it has uncovered ground truth entity",
"{}\".format(self.num_embedding_features)) # Add a zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features)))",
"\"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data",
"self.num_classes += 1 print(\"number of training examples = \" + str(len(raw_data_train))) all_tags =",
"vector notation for each Tag self.vector_to_tag_map = {} self.num_classes = 0 # Used",
"\" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS =",
"= [] raw_tags_train = [] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if",
"Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs)",
"import nltk from nltk.tokenize import sent_tokenize import _pickle as cPickle class DataReader: def",
"self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries to",
"in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0]",
"word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \" +",
"import sequence import numpy as np import nltk from nltk.tokenize import sent_tokenize import",
"load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with",
"COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\")",
"in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags =",
"= [] # Process all lines in the file for line in f_test:",
"for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() if w in",
"+ str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes",
"[] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() t =",
"self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp Table for words and their",
"self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from",
"list of list of tokens all_sentences_words = [] # Process all lines in",
"pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags)",
"self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] = self.max_sentence_len_train pickle_content[\"tag_to_vector_map\"] = self.tag_to_vector_map",
"self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file):",
"= word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)): w = word_seq[ix]",
"open(test_file, 'r') as f_test: data_set = [] sentence_words = [] sentence_tags = []",
"notation raw_data_train = [] raw_words_train = [] raw_tags_train = [] # Process all",
"into sentences before tokenization sentences = sent_tokenize(text) for sent in sentences: sentence_words =",
"np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS",
"= [] unk_words = [] count = 0 for word_seq in all_sentences_words: if",
"print(\"skip the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq =",
"as f_train: self.tag_to_vector_map = {} # For storing one hot vector notation for",
"= len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors",
"data from file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set = [] sentence_words",
"open(data_file, 'r') as f_data: all_sentences_words = [] # Process all lines in the",
"self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if",
"tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \" + str(len(data_set)))",
"=0 # Other stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all = 0",
"data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data",
"+ elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \"",
"Process all lines in the file for line in f_data: text = line.strip()",
"=None): # Some constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 #",
"as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features",
"raw_words_train = [] raw_tags_train = [] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag)",
"line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = []",
"= {} self.wordvecs = [] ###Create LookUp Table for words and their word",
"file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map = {} # For storing",
"self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map",
"self.max_sentence_len_train = 0 for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train =",
"in the file for line in f_data: text = line.strip() #break the input",
"self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs = None self.word_to_ix_map =",
"all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return",
"= 0 for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0])",
"in data_list: text = line.strip() #break the input text into sentences before tokenization",
"self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ##################################################",
"them all the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length =",
"sentence_tags = [] # Process all lines in the file for line in",
"#ignore the word if it has uncovered ground truth entity type if not",
"= [], [] unk_words = [] count = 0 for word_seq, tag_seq in",
"+ elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found",
"all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list = [] unk_words = []",
"= {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading",
"tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] =",
"{}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features",
"tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w",
"= pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos =",
"of training examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag",
"np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y])",
"all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT",
"[] count = 0 for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip",
"np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return",
"[] unk_words = [] count = 0 for word_seq, tag_seq in data_set: if",
"the Word2Vec Model### print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file,",
"= line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags =",
"self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test, all_Y_test = [], [] num_tokens_list",
"= word_seq[ix] t = tag_seq[ix] w = w.lower() if w in self.word_to_ix_map :",
"num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data)",
"str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1,",
"(self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V model from file {}\".format(embeddings_file))",
"cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab =",
"unlabeled data from file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words = []",
"1 # sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs",
"of list of tokens all_sentences_words = [] # Process all lines in the",
"sequence import numpy as np import nltk from nltk.tokenize import sent_tokenize import _pickle",
"raw_data_train: elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix]",
"[] sentence_tags = [] # Process all lines in the file for line",
"COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## #",
"# decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs in",
"w.lower() if w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\"",
"+= 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w",
"2 * np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution norm_const = np.linalg.norm(new_wv)",
"line in f_test: line = line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words",
"- 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries to make",
"# Process all lines in the file for line in f_data: text =",
"WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources",
"self.max_sentence_len_train: print(\"skip the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq",
"+ str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train)",
"resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes')",
"self.max_sentence_len_train: print(\"skip the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq)",
"count = 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], []",
"= pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train =",
"len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train =",
"+ elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK",
"= word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix in",
"def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file)) with",
"pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"]",
"= nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples =",
"unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self,",
"output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file)) with open(train_file, 'r') as f_train:",
"nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append(",
"as f_data: all_sentences_words = [] # Process all lines in the file for",
"# sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs =",
"= [] unk_words = [] count = 0 for word_seq, tag_seq in data_set:",
"- 1 # sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const",
"w.lower() if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map",
"elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\"",
"len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero vector for the Paddings",
"file for line in data_list: text = line.strip() #break the input text into",
"= np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT =",
"print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words =",
"range(len(word_seq)): w = word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore the word",
"+ str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT =",
": unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv = 2 *",
"save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content",
"0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file",
"0 for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words",
"+1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a",
"if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ##################################################",
"'r') as f_train: self.tag_to_vector_map = {} # For storing one hot vector notation",
"data_list): print(\"Reading unlabeled data from dataframe\") # list of list of tokens all_sentences_words",
"sentence_tags = [] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test",
"= self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self,",
"\" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test, all_Y_test =",
"- 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file,",
"in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of training examples =",
"np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes",
"all the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train",
"self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) +",
"vectors all_X_test, all_Y_test = [], [] num_tokens_list = [] unk_words = [] count",
"pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model###",
"w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences",
"of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1",
"= index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the lookup table",
"{}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero vector",
"num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from",
"= cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab",
"elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS",
"in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix",
"data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long sentence\")",
"tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue word, tag = line.split('\\t') raw_words_train.append(word)",
"from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv))",
"pred_seq): pred_tags = [] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)]",
"self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs)",
"file for line in f_data: text = line.strip() #break the input text into",
"'r') as f_data: all_sentences_words = [] # Process all lines in the file",
"long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], []",
"- 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries to make",
"print(\"Creating the lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]])",
"all the sentences the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs))",
"= w.lower() if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in",
"((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT =",
"all_Y_train = [], [] unk_words = [] count = 0 for word_seq, tag_seq",
"for each Tag self.vector_to_tag_map = {} self.num_classes = 0 # Used to put",
"= \" + str(count)) print(\"TOTAL WORDS = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_data,",
"put 1 in the one hot vector notation raw_data_train = [] raw_words_train =",
"w.lower() t = tag_seq[ix] #ignore the word if it has uncovered ground truth",
"= tag_seq[ix] w = w.lower() if w in self.word_to_ix_map : count += 1",
"count = 0 for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the",
"+ elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK",
"> self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train = [],",
"new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution norm_const",
"extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = []",
"= tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1,",
"dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1",
"{}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f,",
"data from file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map = {} #",
"np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \"",
"= [], [] for ix in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix]",
"word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)): w = word_seq[ix] w",
"= np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT =",
"missing entries to make them all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs))",
"self.vector_to_tag_map = {} self.num_classes = 0 # Used to put 1 in the",
"self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find the maximum sequence length for",
"elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\"",
"the sequences for missing entries to make all the sentences the same length",
"+1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes +=",
"[] # Process all lines in the file for line in f_test: line",
"################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file",
"= self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train",
"self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train",
"COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL",
"class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ##################################################",
"them all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train -",
"= w.lower() t = tag_seq[ix] #ignore the word if it has uncovered ground",
"one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] =",
"input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content",
"= 2 * np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution norm_const =",
"self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving",
"lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs =",
"[] raw_tags_train = [] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag",
"= sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id]",
"all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT",
"1 self.n_sentences_all = len(raw_data_train) # Find the maximum sequence length for Training data",
"Process all lines in the file for line in data_list: text = line.strip()",
"all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words)))",
"of test examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature",
"= 0 for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra",
"new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"[] raw_words_train = [] raw_tags_train = [] # Process all lines in the",
"w = word_seq[ix] w = w.lower() if w in self.word_to_ix_map: count += 1",
"open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs =",
"= self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"] =",
"[], [] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() t",
"= [] raw_tags_train = [] # Process all lines in the file for",
"vector notation raw_data_train = [] raw_words_train = [] raw_tags_train = [] # Process",
"################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file))",
"unk_words = [] count = 0 for word_seq, tag_seq in data_set: if len(word_seq)",
"- len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train)",
"# get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file))",
"self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self,",
"= self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for missing entries to",
"np.array(all_Y_train) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \"",
"the training data from file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map =",
"= np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag",
"length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) +",
"raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number",
"def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list = [] unk_words",
"'rb') as f: pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"]",
"for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in",
"+ str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def",
"\"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv",
"str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ##################################################",
"open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map =",
"sentences the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length =",
"feature vectors all_X_test, all_Y_test = [], [] num_tokens_list = [] unk_words = []",
": unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] -",
"w = w.lower() if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\"",
"################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = []",
"= 0 for word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the",
"num_tokens_list = [] unk_words = [] count = 0 for word_seq, tag_seq in",
"= self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries",
"zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] -",
"unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the",
"= self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries",
"else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad",
"################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data",
"all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words)))",
"= [] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() if",
"pred_tags ################################################## # load_resources_pickle_file ################################################## def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle",
"as f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs",
"w = w.lower() if w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t])",
"print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def",
"len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train = [], [] unk_words = []",
"\" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT",
"0 for word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra",
"= list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp Table for words",
"extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs,",
"lines in the file for line in f_test: line = line.strip() if not",
"f_data: text = line.strip() #break the input text into sentences before tokenization sentences",
"class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else:",
"tag_seq[ix] w = w.lower() if w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled",
"enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the",
"[] raw_tags_train = [] # Process all lines in the file for line",
"[] count = 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [],",
"= \" + str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS",
"sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)): w",
"nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append(",
"input_resources_pickle_file =None): # Some constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0",
"uncovered ground truth entity type if not (t in self.tag_to_vector_map): continue if w",
"str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT= \" +",
"get_feature_vectors_2 ################################################## def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with",
"file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set = [] sentence_words = []",
"for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() t = tag_seq[ix]",
"if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = [] continue",
"WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words)))",
"W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = []",
"read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file)) with open(train_file,",
"self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data =",
"enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)]",
"Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)]",
"return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list",
"each Tag self.vector_to_tag_map = {} self.num_classes = 0 # Used to put 1",
"\" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec =",
"# list of list of tokens all_sentences_words = [] # Process all lines",
"for line in f_train: line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train)))",
"keras.preprocessing import sequence import numpy as np import nltk from nltk.tokenize import sent_tokenize",
"maximum sequence length for Training data self.max_sentence_len_train = 0 for seq in raw_data_train:",
"self.tag_to_vector_map): continue if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\"",
"pred_tags = [] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] =",
"= pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map =",
"print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT = \" +",
"str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ################################################## def get_feature_vectors_2",
"[] # Process all lines in the file for line in f_data: text",
"str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file):",
"all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs)",
"{}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map = {} # For storing one",
"as cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes",
"self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] -",
"vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1",
"self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self,",
"def read_and_parse_test_data (self, test_file): print(\"Loading test data from file {}\".format(test_file)) with open(test_file, 'r')",
"[] word_seq_list = [] num_tokens_list = [] unk_words = [] count = 0",
"1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())]) else: print(tuple(class_vec.tolist())) return pred_tags ################################################## # load_resources_pickle_file",
"all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32)",
"unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad",
"self.num_classes = 0 # Used to put 1 in the one hot vector",
"= \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for tag_class_id, tag in enumerate(all_tags): one_hot_vec",
"print(\"number of test examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST",
"= line.strip() #break the input text into sentences before tokenization sentences = sent_tokenize(text)",
"((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train)",
"examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list):",
"################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\") # list of",
"not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue word,",
"elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing entries to make all the",
"sentences before tokenization sentences = sent_tokenize(text) for sent in sentences: sentence_words = nltk.word_tokenize(sent)",
"sentences: sentence_words = nltk.word_tokenize(sent) all_sentences_words.append( tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled",
"w = w.lower() t = tag_seq[ix] #ignore the word if it has uncovered",
"file for line in f_test: line = line.strip() if not line: data_set.append( (tuple(sentence_words),",
"{} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {} if not (input_resources_pickle_file",
"self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the lookup",
"word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore the word if it has",
"in f_data: text = line.strip() #break the input text into sentences before tokenization",
"= \" + str(count)) print(\"TOTAL WORDS COUNT= \" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return",
"self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map = pickle_content[\"tag_to_vector_map\"] self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ##################################################",
"None self.num_classes += 1 print(\"number of training examples = \" + str(len(raw_data_train))) all_tags",
"all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train =",
"cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes =",
"file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] =",
"long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in range(len(word_seq)):",
"same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs)",
"'r') as f_test: data_set = [] sentence_words = [] sentence_tags = [] #",
"str(len(data_set))) self.n_sentences_all = len(data_set) #Create TEST feature vectors all_X_test, all_Y_test = [], []",
"notation for each Tag self.vector_to_tag_map = {} self.num_classes = 0 # Used to",
"in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map :",
"in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv =",
"elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries to make them all",
"= None self.num_classes += 1 print(\"number of training examples = \" + str(len(raw_data_train)))",
"count = 0 for word_seq, tag_seq in data_set: if len(word_seq) > self.max_sentence_len_train: print(\"skip",
"[] continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map:",
"= tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find",
"= [], [] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower()",
") self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ##################################################",
"return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file): print(\"Loading the",
"words and their word vectors### print(\"Creating the lookup table\") for index, word in",
"################################################## # get_feature_vectors_1 ################################################## def get_feature_vectors_1 (self, data_list): print(\"Reading unlabeled data from dataframe\")",
"(self, train_file, output_resources_pickle_file): print(\"Loading the training data from file {}\".format(train_file)) with open(train_file, 'r')",
"continue word, tag = line.split('\\t') raw_words_train.append(word) raw_tags_train.append(tag) if tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag]",
"def get_feature_vectors_2 (self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file, 'r')",
"from file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map = {} # For",
"in the file for line in f_train: line = line.strip() if not line:",
"import _pickle as cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None): # Some",
"pickle_content[\"word_to_ix_map\"] = self.word_to_ix_map pickle_content[\"wordvecs\"] = self.wordvecs pickle_content[\"num_embedding_features\"] = self.num_embedding_features pickle_content[\"num_classes\"] = self.num_classes pickle_content[\"max_sentence_len_train\"]",
"in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w)",
"= [] # Process all lines in the file for line in data_list:",
"1 print(\"number of training examples = \" + str(len(raw_data_train))) all_tags = sorted(list(self.tag_to_vector_map.keys())) for",
"sample from normal distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs,",
"with open(embeddings_file, 'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map",
"= len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a zero vector for the",
"read_and_parse_test_data (self, test_file): print(\"Loading test data from file {}\".format(test_file)) with open(test_file, 'r') as",
"(self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f:",
"pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map: pred_tags.append(self.vector_to_tag_map[tuple(class_vec.tolist())])",
"1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec =",
"in range(len(word_seq)): w = word_seq[ix] t = tag_seq[ix] w = w.lower() if w",
"test_file): print(\"Loading test data from file {}\".format(test_file)) with open(test_file, 'r') as f_test: data_set",
"in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in",
"data self.max_sentence_len_train = 0 for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train",
"np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS",
"tuple(sentence_words) ) self.n_sentences_all = len(all_sentences_words) print(\"number of unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words)",
"= [] count = 0 for word_seq in all_sentences_words: if len(word_seq) > self.max_sentence_len_train:",
"self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train =0 # Other stuff self.wordvecs =",
"'NONE' self.num_classes += 1 self.n_sentences_all = len(raw_data_train) # Find the maximum sequence length",
"if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue",
"= {}\".format(self.num_embedding_features)) # Add a zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs,",
"(self, data_file): print(\"Loading unlabeled data from file {}\".format(data_file)) with open(data_file, 'r') as f_data:",
"self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"] self.tag_to_vector_map",
"np.array(self.wordvecs) print(\"Number of entries in the lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0])",
"self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = []",
"LookUp Table for words and their word vectors### print(\"Creating the lookup table\") for",
"length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append(",
"line = line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags",
"= self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data",
"ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore",
"class DataReader: def __init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes = 0",
"sequence length for Training data self.max_sentence_len_train = 0 for seq in raw_data_train: if",
"'rb') as f: W2V_model = cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {}",
"in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"])",
"{} self.num_classes = 0 # Used to put 1 in the one hot",
"self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries to",
"= 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec",
"self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train = [], []",
"= [] raw_words_train = [] raw_tags_train = [] # Process all lines in",
"pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data",
"+ str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test,",
"def decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs in pred_seq: class_vec =",
"<gh_stars>10-100 from keras.preprocessing import sequence import numpy as np import nltk from nltk.tokenize",
"table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add a",
"\"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w]",
"lookup table = {}\".format(len(self.wordvecs))) self.num_embedding_features = len(self.wordvecs[0]) print(\"embedding size = {}\".format(self.num_embedding_features)) # Add",
"pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"] self.max_sentence_len_train = pickle_content[\"max_sentence_len_train\"]",
"str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT = \"",
"+ str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self,",
"len(data_set) #Create TEST feature vectors all_X_test, all_Y_test = [], [] num_tokens_list = []",
": unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1",
"data_list: text = line.strip() #break the input text into sentences before tokenization sentences",
"dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None",
"sentence_tags.append(tag) print(\"number of test examples = \" + str(len(data_set))) self.n_sentences_all = len(data_set) #Create",
"index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number of entries in the lookup table =",
"and their word vectors### print(\"Creating the lookup table\") for index, word in enumerate(vocab):",
"{}\".format(test_file)) with open(test_file, 'r') as f_test: data_set = [] sentence_words = [] sentence_tags",
"decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs in pred_seq:",
"word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)):",
"line.strip() #break the input text into sentences before tokenization sentences = sent_tokenize(text) for",
"0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], [] for ix",
"Pad the sequences for missing entries to make them all the same length",
"{}\".format(data_file)) with open(data_file, 'r') as f_data: all_sentences_words = [] # Process all lines",
"= w.lower() if w in self.word_to_ix_map : count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif",
"\" + str(count+len(unk_words))) self.save_resources(output_resources_pickle_file) print(\"Done\") return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def",
"in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif",
"= 0 for word_seq, tag_seq in raw_data_train: elem_wordvecs, elem_tags = [], [] for",
"len(raw_data_train) # Find the maximum sequence length for Training data self.max_sentence_len_train = 0",
"###Load the Word2Vec Model### print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model =",
"self.n_sentences_all = len(raw_data_train) # Find the maximum sequence length for Training data self.max_sentence_len_train",
"if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train",
"elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train = np.array(all_X_train) all_Y_train = np.array(all_Y_train) print(\"UNK WORD",
"elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) #",
"def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V model from",
"For storing one hot vector notation for each Tag self.vector_to_tag_map = {} self.num_classes",
"+ str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS = \"",
"= self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) +",
"self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of training examples = \" +",
"create_feature_vectors(self, all_sentences_words): all_X_data = [] word_seq_list = [] num_tokens_list = [] unk_words =",
"= self.max_sentence_len_train - len(elem_wordvecs) all_X_data.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_X_data = np.array(all_X_data) print(\"UNK WORD",
"the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as f: pickle_content = cPickle.load(f,",
"0 for seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ##############",
"cPickle.load(f, encoding='bytes') vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp",
"self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"])",
"nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs)",
"all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT = \" + str(len(unk_words))) print(\"Found WORDS COUNT",
"str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set,",
"decode_prediction_sequence (self, pred_seq): pred_tags = [] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes,",
"print(\"Reading unlabeled data from dataframe\") # list of list of tokens all_sentences_words =",
"# For storing one hot vector notation for each Tag self.vector_to_tag_map = {}",
"distribution norm_const = np.linalg.norm(new_wv) new_wv /= norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] =",
"continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples = \"",
"= [] # Process all lines in the file for line in f_train:",
"the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs = [] for ix in",
"np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[tag_class_id] = 1 self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding",
"elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1",
"missing entries to make them all the same length nil_X = self.zero_vec_pos nil_Y",
"not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self,",
"read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data from file {}\".format(test_file)) with",
"np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data",
"w = word_seq[ix] w = w.lower() t = tag_seq[ix] #ignore the word if",
"(self, input_resources_pickle_file =None): # Some constants self.num_classes = 0 self.num_embedding_features = 0 self.max_sentence_len_train",
"elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD",
"open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test",
"= {} self.num_classes = 0 # Used to put 1 in the one",
"self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad",
"+= 1 self.n_sentences_all = len(raw_data_train) # Find the maximum sequence length for Training",
"(t in self.tag_to_vector_map): continue if w in self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t])",
"Add a zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos =",
"unlabeled data from dataframe\") # list of list of tokens all_sentences_words = []",
"nltk from nltk.tokenize import sent_tokenize import _pickle as cPickle class DataReader: def __init__",
"one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = 'NONE' self.num_classes += 1 self.n_sentences_all",
"in range(len(word_seq)): w = word_seq[ix] w = w.lower() if w in self.word_to_ix_map: count",
"= self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ##################################################",
"def save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {}",
"################################################## # read_and_parse_test_data ################################################## def read_and_parse_test_data (self, test_file): print(\"Loading test data from file",
"make them all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length = self.max_sentence_len_train",
"their word vectors### print(\"Creating the lookup table\") for index, word in enumerate(vocab): self.word_to_ix_map[word]",
"self.max_sentence_len_train = len(seq[0]) ############## Create Train Vectors################ all_X_train, all_Y_train = [], [] unk_words",
"tag not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of training",
"self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes]",
": count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"])",
"self.max_sentence_len_train - len(elem_wordvecs) all_X_train.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_train.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_train =",
"self.word_to_ix_map: count += 1 elem_wordvecs.append(self.word_to_ix_map[w]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else:",
"= tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] =",
"all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags) all_X_test = np.array(all_X_test) all_Y_test = np.array(all_Y_test) print(\"UNK WORD COUNT",
"unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) else: unk_words.append(w) w = \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w])",
"None self.word_to_ix_map = {} self.n_sentences_all = 0 self.tag_to_vector_map = {} self.vector_to_tag_map = {}",
"load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb') as",
"vocab = list(W2V_model.keys()) self.word_to_ix_map = {} self.wordvecs = [] ###Create LookUp Table for",
"seq in raw_data_train: if len(seq[0]) > self.max_sentence_len_train: self.max_sentence_len_train = len(seq[0]) ############## Create Train",
"the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train]",
"self.vector_to_tag_map = pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self,",
"to make them all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length =",
"= {} if not (input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ##################################################",
"the sentences the same length nil_X = self.zero_vec_pos nil_Y = np.array(self.tag_to_vector_map['NONE']) num_tokens_list.append(len(elem_wordvecs)) pad_length",
"in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1 if tuple(class_vec.tolist()) in self.vector_to_tag_map:",
"pickle_content[\"vector_to_tag_map\"] self.zero_vec_pos = pickle_content[\"zero_vec_pos\"] ################################################## # load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load",
"f_test: line = line.strip() if not line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = []",
"\" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2 ##################################################",
"0 self.max_sentence_len_train =0 # Other stuff self.wordvecs = None self.word_to_ix_map = {} self.n_sentences_all",
"[] ###Create LookUp Table for words and their word vectors### print(\"Creating the lookup",
"(input_resources_pickle_file is None): self.load_resources_pickle_file (input_resources_pickle_file) ################################################## # decode_prediction_sequence ################################################## def decode_prediction_sequence (self, pred_seq):",
"(all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources into",
"print(\"Loading the training data from file {}\".format(train_file)) with open(train_file, 'r') as f_train: self.tag_to_vector_map",
"unlabeled examples = {}\".format(self.n_sentences_all)) return self.create_feature_vectors(all_sentences_words) ################################################## # create_feature_vectors ################################################## def create_feature_vectors(self, all_sentences_words):",
"line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = [] raw_tags_train = []",
"the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\") return",
"f_train: self.tag_to_vector_map = {} # For storing one hot vector notation for each",
"Train Vectors################ all_X_train, all_Y_train = [], [] unk_words = [] count = 0",
"the extra words in the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] word_seq_list.append(word_seq) elem_wordvecs =",
"# Add a zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos",
"return (all_X_train, all_Y_train) ################################################## # save_resources ################################################## def save_resources(self, output_resources_pickle_file): print(\"saving the resources",
"not in self.tag_to_vector_map: self.tag_to_vector_map[tag] = None self.num_classes += 1 print(\"number of training examples",
"= [], [] num_tokens_list = [] unk_words = [] count = 0 for",
"tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [], [] for ix in range(len(word_seq)): w = word_seq[ix]",
"def __init__ (self, input_resources_pickle_file =None): # Some constants self.num_classes = 0 self.num_embedding_features =",
"[] continue word, tag = line.split('\\t') sentence_words.append(word) sentence_tags.append(tag) print(\"number of test examples =",
"the long sentence\") word_seq = word_seq[:self.max_sentence_len_train] tag_seq = tag_seq[:self.max_sentence_len_train] elem_wordvecs, elem_tags = [],",
"the word if it has uncovered ground truth entity type if not (t",
"missing entries to make all the sentences the same length nil_X = self.zero_vec_pos",
"[] for ix in range(len(word_seq)): w = word_seq[ix] w = w.lower() if w",
"all lines in the file for line in f_data: text = line.strip() #break",
"one hot vector notation for each Tag self.vector_to_tag_map = {} self.num_classes = 0",
"tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1",
"norm_const self.wordvecs = np.vstack((self.wordvecs, new_wv)) self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) #",
"f_test: data_set = [] sentence_words = [] sentence_tags = [] # Process all",
"+ str(len(unk_words))) print(\"Found WORDS COUNT = \" + str(count)) print(\"TOTAL WORDS COUNT= \"",
"one_hot_vec = np.zeros(self.num_classes +1, dtype=np.int32) one_hot_vec[self.num_classes] = 1 self.tag_to_vector_map['NONE'] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] =",
"data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue word, tag =",
"for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0] - 1 print(\"Done\")",
"WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ##################################################",
"pad_length = self.max_sentence_len_train - len(elem_wordvecs) all_X_test.append( ((pad_length)*[nil_X]) + elem_wordvecs) all_Y_test.append( ((pad_length)*[nil_Y]) + elem_tags)",
"1 print(\"Done\") return (self.wordvecs) ################################################## ## read_and_parse_training_data ################################################## def read_and_parse_training_data (self, train_file, output_resources_pickle_file):",
"self.tag_to_vector_map pickle_content[\"vector_to_tag_map\"] = self.vector_to_tag_map pickle_content[\"zero_vec_pos\"] = self.zero_vec_pos cPickle.dump(pickle_content, open(output_resources_pickle_file, \"wb\")) print(\"Done\") ################################################## #",
"= [] sentence_words = [] sentence_tags = [] # Process all lines in",
"Process all lines in the file for line in f_train: line = line.strip()",
"\" + str(count)) print(\"TOTAL WORDS COUNT = \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test,",
"elem_tags.append(self.tag_to_vector_map[t]) elif \"UNK\" in self.word_to_ix_map : unk_words.append(w) elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w =",
"all_sentences_words: if len(word_seq) > self.max_sentence_len_train: print(\"skip the extra words in the long sentence\")",
"1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(list(self.tag_to_vector_map[t])) # Pad the sequences for missing entries to make them",
"(self, pred_seq): pred_tags = [] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32)",
"sentence_words = [] sentence_tags = [] # Process all lines in the file",
"[], [] unk_words = [] count = 0 for word_seq, tag_seq in raw_data_train:",
"= \"UNK\" self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) # Pad the sequences for",
"a zero vector for the Paddings self.wordvecs = np.vstack((self.wordvecs, np.zeros(self.num_embedding_features))) self.zero_vec_pos = self.wordvecs.shape[0]",
"{} # For storing one hot vector notation for each Tag self.vector_to_tag_map =",
"pickle_content = cPickle.load(f, encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"]",
"embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V model from file {}\".format(embeddings_file)) #W2V_model",
"TEST feature vectors all_X_test, all_Y_test = [], [] num_tokens_list = [] unk_words =",
"= \" + str(count+len(unk_words))) print(\"Done\") return (all_X_test, all_Y_test, data_set, num_tokens_list) ################################################## # get_feature_vectors_2",
"self.wordvecs = [] ###Create LookUp Table for words and their word vectors### print(\"Creating",
"elem_wordvecs.append(self.word_to_ix_map[\"UNK\"]) elem_tags.append(self.tag_to_vector_map[t]) else: unk_words.append(w) w = \"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) -",
"line: data_set.append( (tuple(sentence_words), tuple(sentence_tags))) sentence_words = [] sentence_tags = [] continue word, tag",
"load_embedding_lookup_table ################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V",
"self.tag_to_vector_map[tag] = tuple(one_hot_vec) self.vector_to_tag_map[tuple(one_hot_vec)] = tag #Adding a None Tag one_hot_vec = np.zeros(self.num_classes",
"entries to make them all the same length nil_X = self.zero_vec_pos num_tokens_list.append(len(elem_wordvecs)) pad_length",
"W2V model from file {}\".format(embeddings_file)) #W2V_model = cPickle.load(open(embeddings_file, \"rb\")) with open(embeddings_file, 'rb') as",
"= [] for class_prs in pred_seq: class_vec = np.zeros(self.num_classes, dtype=np.int32) class_vec[np.argmax(class_prs)] = 1",
"for index, word in enumerate(vocab): self.word_to_ix_map[word] = index self.wordvecs.append(W2V_model[vocab[index]]) self.wordvecs = np.array(self.wordvecs) print(\"Number",
"size = {}\".format(self.num_embedding_features)) # Add a zero vector for the Paddings self.wordvecs =",
"if it has uncovered ground truth entity type if not (t in self.tag_to_vector_map):",
"save_resources(self, output_resources_pickle_file): print(\"saving the resources into the file {}\".format(output_resources_pickle_file)) pickle_content = {} pickle_content[\"word_to_ix_map\"]",
"encoding='bytes') self.word_to_ix_map = pickle_content[\"word_to_ix_map\"] self.wordvecs = pickle_content[\"wordvecs\"] self.num_embedding_features = pickle_content[\"num_embedding_features\"] self.num_classes = pickle_content[\"num_classes\"]",
"################################################## def load_embedding_lookup_table (self, embeddings_file): ###Load the Word2Vec Model### print(\"Loading the W2V model",
"\"UNK\" new_wv = 2 * np.random.randn(self.num_embedding_features) - 1 # sample from normal distribution",
"self.word_to_ix_map[w] = self.wordvecs.shape[0] - 1 elem_wordvecs.append(self.word_to_ix_map[w]) elem_tags.append(self.tag_to_vector_map[t]) # Pad the sequences for missing",
"def load_resources_pickle_file (self, input_resources_pickle_file): print(\"Loading the resources pickle file {}\".format(input_resources_pickle_file)) with open(input_resources_pickle_file, 'rb')",
"np import nltk from nltk.tokenize import sent_tokenize import _pickle as cPickle class DataReader:",
"sent_tokenize import _pickle as cPickle class DataReader: def __init__ (self, input_resources_pickle_file =None): #",
"f_train: line = line.strip() if not line: raw_data_train.append( (tuple(raw_words_train), tuple(raw_tags_train))) raw_words_train = []"
] |
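The shingles in the row above all come from one NER-style data reader: words are mapped to row indices of an embedding lookup table (with an "UNK" fallback), tags are mapped to one-hot vectors, and every sequence is left-padded with a dedicated all-zero embedding row to a common length. Below is a minimal, self-contained sketch of that padding/lookup logic, assembled as an illustration rather than the original DataReader class; names such as word_to_ix and zero_vec_pos mirror the fragments, while the helper functions and the toy data are assumptions.

import numpy as np

def build_lookup(w2v_model):
    # w2v_model: dict mapping word -> 1-D embedding vector
    word_to_ix, vectors = {}, []
    for ix, (word, vec) in enumerate(w2v_model.items()):
        word_to_ix[word] = ix
        vectors.append(vec)
    vectors = np.vstack(vectors)
    # extra all-zero row used for padding positions, as in the fragments
    vectors = np.vstack((vectors, np.zeros(vectors.shape[1])))
    zero_vec_pos = vectors.shape[0] - 1
    return word_to_ix, vectors, zero_vec_pos

def encode_sentence(words, tags, word_to_ix, tag_to_vec, zero_vec_pos, max_len):
    # truncate overlong sentences, then left-pad with the zero-vector index
    # and the 'NONE' tag so every example has length max_len
    ixs = [word_to_ix.get(w.lower(), word_to_ix.get("UNK", zero_vec_pos)) for w in words][:max_len]
    ys = [tag_to_vec[t] for t in tags][:max_len]
    pad = max_len - len(ixs)
    return np.array(pad * [zero_vec_pos] + ixs), np.array(pad * [tag_to_vec["NONE"]] + ys)

# toy usage (hypothetical embeddings and tag set)
w2v = {"UNK": np.zeros(4), "john": np.ones(4), "works": np.full(4, 2.0)}
word_to_ix, vecs, zero_pos = build_lookup(w2v)
tag_to_vec = {"PER": (1, 0, 0), "O": (0, 1, 0), "NONE": (0, 0, 1)}
X, Y = encode_sentence(["John", "works"], ["PER", "O"], word_to_ix, tag_to_vec, zero_pos, max_len=5)
print(X.shape, Y.shape)  # (5,) (5, 3)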
[
"settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter)",
"logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler)",
"import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler",
"logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler =",
"[%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler) consoleHandler = logging.StreamHandler(sys.stdout)",
"%(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler) consoleHandler = logging.StreamHandler(sys.stdout) consoleHandler.setFormatter(logFormatter)",
"logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler) consoleHandler =",
"import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH))",
"coding: utf-8 import sys import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s')",
"= logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler) consoleHandler",
"import sys import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger =",
"logger = logging.getLogger() fileHandler = logging.FileHandler('{0}'.format(settings.LOG_FILE_PATH)) fileHandler.setFormatter(logFormatter) logger.addHandler(fileHandler) consoleHandler = logging.StreamHandler(sys.stdout) consoleHandler.setFormatter(logFormatter) logger.addHandler(consoleHandler)",
"# coding: utf-8 import sys import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s]",
"sys import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger = logging.getLogger()",
"utf-8 import sys import logging import settings logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s') logger"
] |
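The windows in this row cover one short logging bootstrap that sends the same formatted records to a file and to stdout. Gathered into a single runnable snippet: the project-specific settings.LOG_FILE_PATH is replaced by a literal path, and a setLevel call is added so INFO messages actually appear; both of those changes are assumptions, the rest comes straight from the fragments.

import sys
import logging

logFormatter = logging.Formatter('%(asctime)s [%(levelname)-5.5s] %(message)s')
logger = logging.getLogger()
logger.setLevel(logging.INFO)  # not in the fragments; added so info() output is visible

# one handler to a log file, one to stdout, both using the same formatter
fileHandler = logging.FileHandler('app.log')
fileHandler.setFormatter(logFormatter)
logger.addHandler(fileHandler)

consoleHandler = logging.StreamHandler(sys.stdout)
consoleHandler.setFormatter(logFormatter)
logger.addHandler(consoleHandler)

logger.info("logging configured")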
[
"'0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True,",
"by Django 1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals from django.db import",
"-*- # Generated by Django 1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals",
"name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True, verbose_name='Número de documento'), ),",
"] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11,",
"[ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True, verbose_name='Número",
"Django 1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals from django.db import migrations,",
"coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-01-31 11:14 from __future__",
"class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano',",
"2017-01-31 11:14 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration):",
"import migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations =",
"migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True, verbose_name='Número de",
"1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals from django.db import migrations, models",
"# Generated by Django 1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals from",
"django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations",
"-*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-01-31 11:14 from",
"from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ]",
"operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True,",
"= [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField(",
"[ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano',",
"model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True, verbose_name='Número de documento'),",
"on 2017-01-31 11:14 from __future__ import unicode_literals from django.db import migrations, models class",
"11:14 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies",
"), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True, verbose_name='Número de documento'), ), ]",
"Generated by Django 1.10.1 on 2017-01-31 11:14 from __future__ import unicode_literals from django.db",
"unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'),",
"migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [",
"dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ),",
"from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies =",
"import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ciudadano',",
"__future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [",
"('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento',",
"utf-8 -*- # Generated by Django 1.10.1 on 2017-01-31 11:14 from __future__ import",
"Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField( model_name='ciudadano', name='uuid',",
"= [ migrations.RemoveField( model_name='ciudadano', name='uuid', ), migrations.AlterField( model_name='ciudadano', name='numero_documento', field=models.CharField(blank=True, max_length=11, null=True, unique=True,",
"# -*- coding: utf-8 -*- # Generated by Django 1.10.1 on 2017-01-31 11:14",
"models class Migration(migrations.Migration): dependencies = [ ('ciudadano', '0005_auto_20170127_1841'), ] operations = [ migrations.RemoveField("
] |
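The overlapping windows in this row cover one complete Django migration; stitched back together it reads as follows. The statements themselves come directly from the fragments, only the indentation and line breaks are assumed.

# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-01-31 11:14
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ciudadano', '0005_auto_20170127_1841'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='ciudadano',
            name='uuid',
        ),
        migrations.AlterField(
            model_name='ciudadano',
            name='numero_documento',
            field=models.CharField(blank=True, max_length=11, null=True, unique=True,
                                   verbose_name='Número de documento'),
        ),
    ]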
[
"\"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model)",
"\"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1,",
"\"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer =",
"\"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\":",
"<filename>src/train/kuka_reach/with_image/train_with_rllib.py import time import ray import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print",
"env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env)",
"pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune import",
"train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\") best_checkpoint=results.get_best_checkpoint(best_trial,metric=metric,mode=\"max\") print('best",
"# \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, },",
"import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv",
"\"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False,",
"CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune import grid_search from ray.rllib.env.env_context import",
"import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo",
"\"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99,",
"0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results",
"\"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config))",
"\"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\":",
"\"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10,",
"ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray import tune from",
"# \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = {",
"print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the",
"print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the algorithm to train config=config, stop=stop,",
"time import ray import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from env",
"env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\":",
"} # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config =",
"KukaCamReachEnv from ray import tune from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext",
"KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", },",
"= tune.run( \"SAC\", # Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1, )",
"config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], #",
"\"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1,",
"}, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, #",
"from ray import tune from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from",
"\"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\",",
"\"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", #",
"\"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, #",
"# Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial =",
"= { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251],",
"results = tune.run( \"SAC\", # Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1,",
"ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={",
"ray import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from env import CustomSkipFrame,",
"# }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop =",
"# \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001,",
"config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", #",
"register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown()",
"\"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results =",
"from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, #",
"# \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False,",
"ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from",
"\"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, },",
"}, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } #",
"\"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) }",
"\"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda",
"# \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config:",
"# trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the algorithm to",
"# \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{",
"ray import tune from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry",
"# \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # },",
"stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\") best_checkpoint=results.get_best_checkpoint(best_trial,metric=metric,mode=\"max\") print('best checkpoint: ',best_checkpoint)",
"\"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False,",
"# env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\",",
"# print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the algorithm to train config=config,",
"to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\") best_checkpoint=results.get_best_checkpoint(best_trial,metric=metric,mode=\"max\")",
"\"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\",",
"\"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000,",
"\"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) #",
"\"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", #",
"#\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]],",
"} # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run(",
"}, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = {",
"= { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer)",
"\"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", #",
"{ \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False,",
"# } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config",
"# # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000,",
"trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the algorithm to train",
"ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } #",
"import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__':",
"\"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, #",
"config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\",",
"as ppo from ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray",
"from ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray import tune",
"\"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{",
"# print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify",
"import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, #",
"# env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) #",
"# \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda",
"grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import",
"{ \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], #",
"__name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # }",
"{ \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) #",
"Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric,",
"PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False,",
"ppo from ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray import",
"\"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\", # \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\",",
"from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) #",
"\"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model)",
"\"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop",
"ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable from",
"\"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000,",
"\"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # #",
"algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\")",
"\"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, #",
"config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\") best_checkpoint=results.get_best_checkpoint(best_trial,metric=metric,mode=\"max\") print('best checkpoint:",
"}, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0,",
"ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from",
"CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\",",
"# trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\",",
"EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import",
"\"SAC\", # Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial",
"# \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\",",
"checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\", scope=\"all\") best_checkpoint=results.get_best_checkpoint(best_trial,metric=metric,mode=\"max\") print('best checkpoint: ',best_checkpoint) ray.shutdown()",
"ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000,",
"tune.run( \"SAC\", # Specify the algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\"",
"from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer",
"\"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer)",
"tune from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env,",
"config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{",
"import ray import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from env import",
"# \"post_fcnet_hiddens\":[512,251], # \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1,",
"ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if",
"import time import ray import ray.rllib.agents.ppo as ppo from ray.tune.logger import pretty_print from",
"\"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000,",
"# env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = {",
"# \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config)))",
"from env import CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune import grid_search",
"import CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune import grid_search from ray.rllib.env.env_context",
"\"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ #",
"\"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, }",
"register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True)",
"trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", #",
"if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, #",
"import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ #",
"#register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251],",
"# \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001])",
"\"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", } stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200,",
"\"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config) # env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config:",
"# \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], #",
"# \"post_fcnet_activation\":\"relu\", # }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":1, \"num_gpus\":1, \"framework\":\"torch\", }",
"# # trainer=ImpalaTrainer(config=config_for_trainer) # print(trainer.get_policy().model) results = tune.run( \"SAC\", # Specify the algorithm",
"import pretty_print from env import CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune",
"import EnvContext from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala",
"# \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer = { \"env\": \"kuka_env\",",
"} stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) #",
"\"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5,",
"ray.rllib.agents.impala import ImpalaTrainer if __name__=='__main__': ray.shutdown() ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False,",
"= { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # \"conv_filters\":\"relu\", \"post_fcnet_hiddens\":[512,251], \"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False,",
"register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{ \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]],",
"stop = { \"episode_reward_mean\": 0.99, \"training_iteration\":200, } # trainer=PPOTrainer(config=config_for_trainer) # print(trainer.get_policy().model) # #",
"from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import register_env, register_trainable",
"env=CustomSkipFrame(env) register_env(\"kuka_env\",lambda config: CustomSkipFrame(KukaCamReachEnv(config))) #register_env(\"kuka_env\",lambda config: KukaCamReachEnv(config)) config = { \"env\": \"kuka_env\", \"model\":{",
"from ray.tune.registry import register_env, register_trainable from ray.rllib.agents.ppo import PPOTrainer from ray.rllib.agents.impala import ImpalaTrainer",
"env import CustomSkipFrame, KukaCamReachEnv from ray import tune from ray.tune import grid_search from",
"import tune from ray.tune import grid_search from ray.rllib.env.env_context import EnvContext from ray.tune.registry import",
"\"post_fcnet_activation\":\"relu\", }, \"env_config\":{ \"is_render\":False, \"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, #",
"\"is_good_view\":False, \"max_steps_one_episode\":1000, }, \"num_workers\":10, \"num_gpus\":1, \"framework\":\"torch\", # \"render_env\":False, # \"num_gpus_per_worker\":0, # \"num_envs_per_worker\":5, #",
"# \"num_envs_per_worker\":5, # \"rollout_fragment_length\":1000, # \"train_batch_size\":4000, # \"batch_mode\":\"complete_episodes\", #\"lr\":0.0001, # \"lr\":grid_search([5e-5,0.0001]) } config_for_trainer",
"} config_for_trainer = { \"env\": \"kuka_env\", # \"model\":{ # \"conv_filters\":[[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[32,[3,3],2],[1152,[6,6],1]], # # \"conv_filters\":\"relu\",",
"ray.init(ignore_reinit_error=True) # env_config={ # \"is_render\":False, # \"is_good_view\":False, # \"max_steps_one_episode\":1000, # } # env=KukaCamReachEnv(env_config)",
"the algorithm to train config=config, stop=stop, checkpoint_freq=1, ) metric=\"episode_reward_mean\" best_trial = results.get_best_trial(metric=metric, mode=\"max\","
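Once tune.run finishes, the checkpoint path printed above can be loaded back for a quick greedy evaluation rollout. A minimal sketch assuming the same pre-2.0 ray.rllib.agents API used in this script; SACTrainer, the placeholder checkpoint path, and the rollout loop are illustrative additions, not part of the original file.

import ray
from ray.rllib.agents.sac import SACTrainer  # same API generation as the agents.* imports above
from ray.tune.registry import register_env

from env import CustomSkipFrame, KukaCamReachEnv

ray.init(ignore_reinit_error=True)
register_env("kuka_env", lambda cfg: CustomSkipFrame(KukaCamReachEnv(cfg)))

env_config = {"is_render": False, "is_good_view": False, "max_steps_one_episode": 1000}
# NOTE: the "model" section must match the one used during training for the weights to load.
agent = SACTrainer(config={"env": "kuka_env", "env_config": env_config,
                           "num_workers": 0, "framework": "torch"})
agent.restore("/path/to/best_checkpoint")  # the path printed as 'best checkpoint:' above

env = CustomSkipFrame(KukaCamReachEnv(env_config))
obs = env.reset()
done, episode_return = False, 0.0
while not done:
    action = agent.compute_action(obs, explore=False)  # greedy action, no exploration noise
    obs, reward, done, info = env.step(action)
    episode_return += reward
print("evaluation return:", episode_return)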
"([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"),",
"\"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ],",
") def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\":",
"\"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges,",
"expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\":",
"import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"),",
"test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\",",
"(\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\":",
"\"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\":",
"{\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\":",
"\"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\":",
"0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], )",
"directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\":",
"inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\",",
"[\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\":",
"directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\":",
"{\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert",
"\"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\":",
"expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\":",
") def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\",",
"[], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"],",
"undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")],",
"\"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected):",
"\"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\":",
"({\"A\": 1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees, expected): assert find_sources(inbounnd_degrees) ==",
"({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ],",
"0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0,",
"[]}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1,",
"\"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"],",
"from collections import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, )",
"(\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\":",
"from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [",
"[\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize(",
"1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list,",
"def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0,",
"== expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])),",
"1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1},",
"\"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"],",
"[\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\":",
"expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\":",
"\"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\":",
"[\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\":",
"\"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def",
"import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\",",
"deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"),",
"({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected):",
"({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\":",
"[ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\",",
"1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}),",
"\"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\":",
"def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"),",
"\"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def",
"1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize(",
"[\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges)",
") def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\":",
"pytest from collections import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources,",
"deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])),",
"@pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"],",
"([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"),",
"@pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0,",
"inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1},",
"[]}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"),",
"\"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees, expected):",
"{\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\":",
"([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], )",
"import pytest from collections import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees,",
"{\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"],",
"\"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], ) def",
"\"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}),",
"(\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")],",
"\"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\",",
"1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1,",
"\"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ],",
"(\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\":",
"[\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ],",
"({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1},",
"<reponame>jodahoney/pyinterview import pytest from collections import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list,",
"[]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], )",
"test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"],",
"\"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\",",
"(\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges,",
"\"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}),",
"[ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\",",
"\"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list)",
"\"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize(",
"(\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")],",
"[\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) ==",
"0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}),",
"[\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert",
"== expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []},",
"\"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\":",
"[\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\":",
"(\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected):",
"assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\":",
"def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\",",
"collections import deque from pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize(",
"[\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"],",
"\"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"],",
"\"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"A\",",
"1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"),",
"\"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\",",
"[\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]},",
"== expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"],",
"\"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1,",
"{\"A\": 1, \"B\": 1}), ], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected",
"1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees,",
"( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\",",
"1}, deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees, expected): assert",
"assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [],",
"expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")],",
"\"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}),",
"undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\",",
"({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}),",
"\"B\": [\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) ==",
"\"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 1, \"B\":",
"[], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}),",
"[ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\":",
"\"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1,",
"0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])),",
"find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"],",
"(\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0, \"B\":",
"\"C\")], {\"A\": [\"B\", \"C\"], \"B\": [\"A\"], \"C\": [\"A\"]}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\":",
"([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def",
"1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\": 1, \"B\": 1}), ], )",
"[\"A\", \"C\"], \"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected",
"\"C\": []}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"A\"]}, {\"A\":",
"\"C\": [\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\",",
"1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}, {\"A\": 0, \"B\": 1,",
"pyinterview.graphs import ( undirected_adj_list, directed_adj_list, inbound_degrees, find_sources, ) @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\",",
"], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [",
"1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees, expected): assert find_sources(inbounnd_degrees) == expected",
"[]}, {\"A\": 0, \"B\": 1, \"C\": 1}), ({\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []},",
"[ ({\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}, {\"A\": 0, \"B\": 1, \"C\":",
"{\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges)",
"expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\", \"expected\"), [ ({\"A\": [\"B\", \"C\"], \"B\":",
"\"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\":",
"[\"B\"]}), ], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"),",
"deque([\"A\"])), ({\"A\": 1, \"B\": 1}, deque([])), ], ) def test_find_sources(inbounnd_degrees, expected): assert find_sources(inbounnd_degrees)",
"], ) def test_undirected_adj_list(edges, expected): assert undirected_adj_list(edges) == expected @pytest.mark.parametrize( (\"edges\", \"expected\"), [",
"@pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\": 1}, deque([\"A\"])), ({\"A\": 0,",
"\"C\")], {\"A\": [\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert",
"\"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected",
"{\"A\": [\"B\", \"C\"], \"B\": [], \"C\": []}), ([(\"A\", \"B\"), (\"B\", \"C\")], {\"A\": [\"B\"],",
") @pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\":",
"\"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) == expected @pytest.mark.parametrize( (\"adj_list\",",
"expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1,",
"assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\": 1, \"C\":",
"], ) def test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [",
"[\"B\"], \"B\": [\"C\"], \"C\": []}), ], ) def test_directed_adj_list(edges, expected): assert directed_adj_list(edges) ==",
"test_inbound_degrees(adj_list, expected): assert inbound_degrees(adj_list) == expected @pytest.mark.parametrize( (\"inbounnd_degrees\", \"expected\"), [ ({\"A\": 0, \"B\":",
"@pytest.mark.parametrize( (\"edges\", \"expected\"), [ ([(\"A\", \"B\"), (\"A\", \"C\")], {\"A\": [\"B\", \"C\"], \"B\": [],"
"def post(self, request, slug): contention = self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(),",
"contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return",
"= self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode =",
"django.utils import timezone from django.db.models import Max from django.utils.timezone import now from django.http",
"return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords",
"import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from",
"content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\"",
"\"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user)",
"\"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user)",
"self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return",
"if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def",
"= \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address =",
"else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result",
"class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object()",
"!= premise.user: return not premise.reported_by(user) return False def is_singular(self, contention): result = (contention",
"self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None): children = [{ \"pk\": premise.pk,",
"children def user_can_report(self, premise, user): if user.is_authenticated() and user != premise.user: return not",
"list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(),",
".filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\" def",
"} def form_valid(self, form): contention = self.get_contention() premise = self.get_premise() form.instance.contention = contention",
"from django.core.urlresolvers import reverse from django.utils import timezone from django.db.models import Max from",
"check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else:",
"= self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self): if",
"self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user",
"ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if",
"not keywords or len(keywords) < 2: result = Contention.objects.none() else: result = (Contention",
"= super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def",
"return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by",
"if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention)",
"\"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\":",
"def post(self, request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published = True contention.save()",
".aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\"",
"redirect from django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View from",
"HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic import",
"= self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else:",
"return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def",
"next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self):",
"offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords()",
"import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry",
"ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention = self.get_object()",
".objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class",
"post(self, request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request,",
"contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if",
"(self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' %",
"contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en az",
"redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention =",
"return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count()",
"premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request,",
"self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance)",
"= 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self):",
"premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\":",
"silinecek durumda değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\"",
"paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs):",
"u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en az 1 \"",
"self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts()",
"notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs)",
"ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug):",
"class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object()",
"django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView from django.db.models import",
"int_or_zero from premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm)",
"= True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self,",
"return children def user_can_report(self, premise, user): if user.is_authenticated() and user != premise.user: return",
"} def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks =",
"self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset",
"= markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def",
"[{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise,",
"import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model =",
"#contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs):",
"'?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5])",
"from django.contrib import messages from django.core.urlresolvers import reverse from django.utils import timezone from",
"self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request,",
"django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit import",
"class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user =",
"tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def",
"= self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises =",
"= self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self,",
"contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications",
"= self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self):",
"\"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\":",
"None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) }",
"template_name = \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content,",
"result = Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result =",
"form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response",
"**kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user",
"Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention",
"**kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\"",
"def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return",
"contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model",
"\"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) }",
"class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView,",
"*args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View):",
"last_week = now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if",
"paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def",
"-*- import json from datetime import timedelta from markdown2 import markdown from django.contrib",
"Count from blog.models import Post from premises.utils import int_or_zero from premises.models import Contention,",
"{ \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention),",
"HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\":",
"Max from django.utils.timezone import now from django.http import HttpResponse from django.shortcuts import get_object_or_404,",
"class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention())",
"= contention form.instance.premise = premise form.instance.reporter = self.request.user form.save() reported_as_fallacy.send(sender=self, report=form.instance) return redirect(contention)",
"user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request,",
"form.instance.contention = contention form.instance.premise = premise form.instance.reporter = self.request.user form.save() reported_as_fallacy.send(sender=self, report=form.instance) return",
"= \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return",
"messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en az 1",
"import UpdateView from django.db.models import Count from blog.models import Post from premises.utils import",
"(self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) } for premise in contention.published_premises(parent)] return",
"self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(),",
"return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete = post",
"if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return",
"\"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if",
"result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions =",
"contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention,",
"get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self):",
"premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self,",
"redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete = post class",
"redirect(contention) delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def",
"= result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions",
"if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self,",
"*args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self):",
"= ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs)",
"get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(),",
"contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True):",
"self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode,",
") def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset() +",
"self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self,",
"get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total >",
"\"name\": premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(),",
"return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return {",
"= self.request.GET.get('keywords') if not keywords or len(keywords) < 2: result = Contention.objects.none() else:",
"(Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView):",
"get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def",
"timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk =",
"get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else",
"form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView):",
"parent=premise) if premise.published_children().exists() else []) } for premise in contention.published_premises(parent)] return children def",
"response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name =",
"premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention,",
"pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention =",
"import Max from django.utils.timezone import now from django.http import HttpResponse from django.shortcuts import",
"premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published =",
"class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView,",
"contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument",
"get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user)",
"messages.info(request, u\"Argümanı yayına almadan önce en az 1 \" u\"önerme ekleyin.\") return redirect(contention)",
"= post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises",
"alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en az 1 \" u\"önerme ekleyin.\")",
"paginate=True): contentions = (Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return",
"timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return",
"get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications,",
"premises = Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form): response",
"self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications",
".filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class =",
"markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self,",
"Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self):",
"get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View",
"result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True)",
"**kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return",
"= self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() }",
"slug): contention = self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return",
"template_name = \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content,",
"import get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView, CreateView,",
"from markdown2 import markdown from django.contrib import messages from django.core.urlresolvers import reverse from",
"return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm",
"return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args,",
"response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class",
"redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all()",
"1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def",
"\"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) } for premise",
"redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk:",
"\"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications):",
"= markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class",
"\"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class",
"self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def",
"pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions =",
"self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\": contention.title, \"parent\": None,",
"**kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name =",
"redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class =",
"contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return",
"= False contention.save() return redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug'])",
"parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\":",
"def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def",
"has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def",
"get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords or len(keywords) < 2: result",
"= \"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated():",
"view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser",
"self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention = self.get_contention() premise = self.get_premise()",
"u\"Argümanı yayına almadan önce en az 1 \" u\"önerme ekleyin.\") return redirect(contention) class",
"premise = self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter = self.request.user form.save()",
"edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data(",
"HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by = 20 def get_context_data(self, **kwargs):",
"get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name",
"premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\":",
"else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return",
"self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\":",
"form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def",
"\"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user,",
"reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists()",
"{ \"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self,",
"PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form):",
"self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user)",
"super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return",
"None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\":",
"{ \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(),",
"def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs):",
"+ self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' % {",
"premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self):",
"user, parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent",
"premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self,",
"markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class =",
"= Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form): response =",
"contention = self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız",
".objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class =",
"TemplateView, CreateView, View from django.views.generic.edit import UpdateView from django.db.models import Count from blog.models",
"+ self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self,",
"tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions =",
"return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention",
"flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects .featured())",
"delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name =",
"template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser:",
"post = delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else:",
"template_name = \"index.html\" tab_class = \"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions",
"form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent =",
"form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification",
"return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user",
"= \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs)",
"from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models",
"contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına",
"} for premise in contention.published_premises(parent)] return children def user_can_report(self, premise, user): if user.is_authenticated()",
"\"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions",
"(Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView):",
"self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None): children = [{",
"for premise in contention.published_premises(parent)] return children def user_can_report(self, premise, user): if user.is_authenticated() and",
"{ \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if",
"content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk,",
"delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if",
"get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions",
"(ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags",
"contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user",
"template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention = self.get_object() view",
"now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return",
".order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\"",
"= self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved =",
"super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def",
"markdown2 import markdown from django.contrib import messages from django.core.urlresolvers import reverse from django.utils",
"def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return",
"self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return",
"self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR']",
"messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention)",
"def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises,",
"import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic",
"remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız",
"{ 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention =",
"children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else None,",
"return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self,",
"user) } def get_premises(self, contention, user, parent=None): children = [{ \"pk\": premise.pk, \"name\":",
"ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if",
"import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import",
"or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView):",
"**kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument = contention",
"Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs):",
"premise.published_children().exists() else []) } for premise in contention.published_premises(parent)] return children def user_can_report(self, premise,",
"self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = (",
"self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter = self.request.user form.save() reported_as_fallacy.send(sender=self, report=form.instance)",
"return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts()",
"return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk)",
"def form_valid(self, form): contention = self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise",
"(added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class",
"self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username])",
"= Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user =",
"= self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class",
"from django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit",
"False contention.save() return redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class",
"from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\"",
"import markdown from django.contrib import messages from django.core.urlresolvers import reverse from django.utils import",
"keywords or len(keywords) < 2: result = Contention.objects.none() else: result = (Contention .objects",
"= PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user)",
"get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise =",
"ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user",
"self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff:",
"= delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises",
"premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise =",
"def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\"",
"if not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post = delete def",
"notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class",
"or \"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % {",
"contention.is_published = False contention.save() return redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention,",
"ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() contention.is_published",
"render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView from",
"**kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset())",
"= [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\":",
"int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return",
"result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result class",
"AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data(",
"self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response =",
"\" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self,",
"= self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug'])",
"önce en az 1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self):",
"= list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search'",
"= False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self):",
"**kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset()",
"notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions",
"**response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self,",
"Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda",
".objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name",
".premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class",
"<= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by = 20",
"def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by",
"} def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords or len(keywords) <",
"PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return",
"\"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention,",
"notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data(",
"super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self,",
"2: result = Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result",
"= (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs):",
"return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView,",
"form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def",
"contention = self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\")",
"class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3)",
"paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return",
"Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published = True",
"import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs):",
"class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications =",
"import Count from blog.models import Post from premises.utils import int_or_zero from premises.models import",
"= 20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications()",
"else: messages.info(request, u\"Argümanı yayına almadan önce en az 1 \" u\"önerme ekleyin.\") return",
"contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects",
"Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() contention.is_published = False contention.save() messages.info(request,",
"self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention = self.get_contention() premise",
"**kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return",
"request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman",
"-*- coding:utf-8 -*- import json from datetime import timedelta from markdown2 import markdown",
"self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self,",
"premise.user: return not premise.reported_by(user) return False def is_singular(self, contention): result = (contention .premises",
"super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content",
"\"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions",
"self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk'])",
"def post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return",
"premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import",
"\"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": {",
"= self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self):",
"kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug):",
"= now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate:",
"'reporter': self.request.user } def form_valid(self, form): contention = self.get_contention() premise = self.get_premise() form.instance.contention",
"import timedelta from markdown2 import markdown from django.contrib import messages from django.core.urlresolvers import",
"'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return",
"UpdateView from django.db.models import Count from blog.models import Post from premises.utils import int_or_zero",
"PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post",
"get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return",
"content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form):",
"form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(),",
"*args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists():",
"= self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post =",
"def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention()",
"def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() contention.is_published =",
"**kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention()",
"Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return",
"\"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords or",
"premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self): return",
"class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request,",
"= self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user):",
"= (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or",
"def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data(",
"timedelta from markdown2 import markdown from django.contrib import messages from django.core.urlresolvers import reverse",
"= \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions =",
"get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs):",
"- timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()]",
"get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\":",
"u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self,",
"if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None",
"if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form)",
"get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk'])",
".annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name =",
"def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView):",
"self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request,",
"premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise)",
"contention.published_premises(parent)] return children def user_can_report(self, premise, user): if user.is_authenticated() and user != premise.user:",
"super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self,",
"}, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else [])",
"1 class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by = 20 def",
"import int_or_zero from premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm,",
"if self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def",
"form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView,",
"return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return",
"content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\"))",
"django.db.models import Count from blog.models import Post from premises.utils import int_or_zero from premises.models",
"slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View):",
"self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self,",
"+ self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self): return (self.request.user",
"Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention,",
"from django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView from django.db.models",
"= \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention = self.get_object() view =",
"else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username,",
"en az 1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return",
"= \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return",
"def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords or len(keywords) < 2:",
"self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications =",
"import DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView from django.db.models import Count",
"'?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords =",
"self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved = True",
"self.request.GET.get('keywords') if not keywords or len(keywords) < 2: result = Contention.objects.none() else: result",
"premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise)",
"contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content =",
"PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data(",
"return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self,",
"request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def",
"% { \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords')",
"Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise,",
"def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form)",
"form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts()",
"tab_class = \"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if",
"return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() contention.is_published = False contention.save()",
"premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView):",
"super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs)",
"class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by = 20 def get_context_data(self,",
"paginate=True): last_week = now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week))",
"ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def",
"path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs): contention",
"response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data(",
"post(self, request, slug): contention = self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından",
"[]) } for premise in contention.published_premises(parent)] return children def user_can_report(self, premise, user): if",
"else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise",
"class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs",
"class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if",
"def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions =",
"get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or",
"delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self):",
"if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id,",
"= \"index.html\" tab_class = \"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions =",
"response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user)",
"has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset",
"or len(keywords) < 2: result = Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords))",
"False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return",
".objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name",
"\"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention,",
"Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete()",
"= (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class",
"slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return",
"if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or",
"class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all()",
"False def is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <=",
"SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs )",
"= ( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view,",
"def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self,",
"contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView):",
"redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention =",
"\"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user),",
"\"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView,",
"self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def",
"**kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(),",
"post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention())",
"post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm",
"\"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView,",
"delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete",
".update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if paginate: contentions =",
"contention.save() return redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView):",
"response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention =",
"django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string from",
"None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self):",
"context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def",
"pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def",
"form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self):",
"**kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return",
"premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention)",
"yayına almadan önce en az 1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView):",
"self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class",
"self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return",
"return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention",
"form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response",
"slug): contention = self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request,",
"get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by",
"\"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) } for premise in contention.published_premises(parent)]",
"def build_tree(self, contention, user): return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\":",
"class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True)",
"= self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address =",
"if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def",
"return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention",
"offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks",
"get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class",
"is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class",
"build_tree(self, contention, user): return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner,",
"NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs)",
"form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return",
"durumda değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class",
"get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() contention.is_published = False",
"paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class",
"contention = self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention)",
"form.instance.user = self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address",
"< 2: result = Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate:",
"else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention,",
"return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\":",
"else []) } for premise in contention.published_premises(parent)] return children def user_can_report(self, premise, user):",
"= self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data(",
"class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return",
"self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(),",
"**kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def",
"ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data(",
"**kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class =",
"from django.views.generic.edit import UpdateView from django.db.models import Count from blog.models import Post from",
"self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self): return",
"def is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1",
"= PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self,",
"model = Contention def get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\" if",
"form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent = self.get_parent()",
"if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\"",
"args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else",
"offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset } def",
"paginate=True): keywords = self.request.GET.get('keywords') if not keywords or len(keywords) < 2: result =",
"def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return",
"reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name",
"= Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()]",
"return contentions[self.get_offset():self.get_limit()] return contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content",
"django.db.models import Max from django.utils.timezone import now from django.http import HttpResponse from django.shortcuts",
"contentions = Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView):",
"= \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(),",
"premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) } for",
"u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete",
"return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs):",
"get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset }",
"self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class",
"user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) },",
"> (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s'",
"def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }),",
"contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode",
"post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises =",
"}), content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\": contention.title, \"parent\": None, \"pk\":",
"super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise,",
"get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return {",
"self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\": contention.title, \"parent\":",
"**kwargs): contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\")",
"form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name",
"contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\")",
"PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import",
"super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def",
"= contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions",
"(self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if",
"if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\")",
"**kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs)",
"return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention':",
"\"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not",
"return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm",
"def post(self, request, slug): contention = self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman",
"not premise.reported_by(user) return False def is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count')))",
"az 1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user)",
"def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises'))",
"premises.utils import int_or_zero from premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm,",
"View from django.views.generic.edit import UpdateView from django.db.models import Count from blog.models import Post",
"user): return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources,",
"= ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response =",
"now from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader import",
"contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention,",
"result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True):",
"return False def is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling']",
"**kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published",
"== \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user ==",
"**kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset()",
"form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return",
"20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications",
"return result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter(",
"return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions",
"parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises,",
"= contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week",
"form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response class",
"def get_premises(self, contention, user, parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\":",
"request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not",
"list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def",
"messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name",
"user.is_authenticated() and user != premise.user: return not premise.reported_by(user) return False def is_singular(self, contention):",
"True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance)",
"return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user",
"\"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()]",
"class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView,",
"class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user)",
"= \"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate:",
".order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\"",
"contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions",
".filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\"",
"= Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise = self.get_premise()",
"form_valid(self, form): contention = self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise =",
"if premise.published_children().exists() else []) } for premise in contention.published_premises(parent)] return children def user_can_report(self,",
"CreateView, View from django.views.generic.edit import UpdateView from django.db.models import Count from blog.models import",
"= (Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView):",
"Post from premises.utils import int_or_zero from premises.models import Contention, Premise from premises.forms import",
"== self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model =",
"keywords = self.request.GET.get('keywords') if not keywords or len(keywords) < 2: result = Contention.objects.none()",
"paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self,",
"return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs):",
"= (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name =",
"keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset =",
"\"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\",",
"get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs)",
"(\"list-view\" if self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff",
"ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({",
"return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published =",
"import reverse from django.utils import timezone from django.db.models import Max from django.utils.timezone import",
"contention = self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter",
"if not keywords or len(keywords) < 2: result = Contention.objects.none() else: result =",
"def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s' % { \"offset\": offset",
"class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object()",
"else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView):",
"supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def",
"premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\": premise.premise_class(), \"children\": (self.get_premises(contention,",
"ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions",
"= self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification =",
"import json from datetime import timedelta from markdown2 import markdown from django.contrib import",
"else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions,",
"contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None): children",
"def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if check_content_deletion(contention):",
"form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save()",
"contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post = delete def get_contention(self): return",
"premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals",
"len(keywords) < 2: result = Contention.objects.none() else: result = (Contention .objects .filter(title__icontains=keywords)) if",
"return not premise.reported_by(user) return False def is_singular(self, contention): result = (contention .premises .all()",
"pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user)",
"delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises = Premise.objects.all() else: premises =",
"contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self,",
"form.instance.update_sibling_counts() return response class ArgumentPublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug):",
"} def get_premises(self, contention, user, parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text,",
"self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() } def",
"coding:utf-8 -*- import json from datetime import timedelta from markdown2 import markdown from",
"= self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name =",
".filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True))",
"get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\" form_class",
"parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises",
"\"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR']",
"premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def",
"tab_class = \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate: contentions",
"import now from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader",
"super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True)",
"+ self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by)",
"NewsView(HomeView): tab_class = \"news\" def get_contentions(self, paginate=True): contentions = Contention.objects.filter( is_published=True) if paginate:",
"( self.request.user.is_superuser or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(),",
"\"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class",
"contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return",
"return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm",
"template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser:",
"self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context,",
"messages from django.core.urlresolvers import reverse from django.utils import timezone from django.db.models import Max",
"contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week =",
"from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string",
"def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() +",
"def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications =",
"form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return",
"and user != premise.user: return not premise.reported_by(user) return False def is_singular(self, contention): result",
"paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\" def get_contentions(self,",
"if user.is_authenticated() and user != premise.user: return not premise.reported_by(user) return False def is_singular(self,",
"u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request,",
"self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def",
"edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs): contention =",
"return self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s'",
"= self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter =",
"def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self,",
"self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post = delete",
"view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention def render_to_response(self, context, **response_kwargs):",
"result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class = \"featured\" paginate_by =",
"(contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs",
"super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\" def get_next_page_url(self):",
"if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response",
"get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if contention.premises.exists(): contention.is_published",
"from premises.utils import int_or_zero from premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm,",
"check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention",
"= (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions class",
"datetime import timedelta from markdown2 import markdown from django.contrib import messages from django.core.urlresolvers",
"get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total =",
"self.request.GET.get('keywords') or \"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' %",
"self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True):",
"def has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self):",
"'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention = self.get_contention()",
"= self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView):",
"user): if user.is_authenticated() and user != premise.user: return not premise.reported_by(user) return False def",
"from blog.models import Post from premises.utils import int_or_zero from premises.models import Contention, Premise",
"return get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk'])",
"= super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class =",
"template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs)",
"import timezone from django.db.models import Max from django.utils.timezone import now from django.http import",
"premises return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response",
"messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def",
"Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user",
"self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter = self.request.user",
".notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if paginate:",
"notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects",
"= self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class",
"contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return",
"def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self,",
"django.views.generic.edit import UpdateView from django.db.models import Count from blog.models import Post from premises.utils",
"supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name =",
"return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts()",
"silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete =",
"True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en",
"offset, \"keywords\": self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords",
"return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by",
".featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name =",
"model = Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\":",
"form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent:",
"form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView):",
"return self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset()",
"\"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\") def build_tree(self, contention, user): return { \"name\": contention.title,",
"user != premise.user: return not premise.reported_by(user) return False def is_singular(self, contention): result =",
"request, slug): contention = self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\")",
"self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class",
"= self.get_premise() form.instance.contention = contention form.instance.premise = premise form.instance.reporter = self.request.user form.save() reported_as_fallacy.send(sender=self,",
"**kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs)",
"tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return",
"def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\",",
"premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def",
"super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm",
"return redirect(contention) post = delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class",
"in contention.published_premises(parent)] return children def user_can_report(self, premise, user): if user.is_authenticated() and user !=",
"self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total > (self.get_offset() +",
"content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name = \"premises/new_contention.html\"",
"return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if",
"newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self,",
"markdown from django.contrib import messages from django.core.urlresolvers import reverse from django.utils import timezone",
"def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name = \"premises/new_premise.html\"",
"get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name = \"premises/report.html\" def",
"form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save()",
"\"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises",
"template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self):",
"from django.db.models import Count from blog.models import Post from premises.utils import int_or_zero from",
"TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data(",
"= \"controversial\" def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions = (Contention",
"= self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\")",
"mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True):",
"return contentions class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\"))",
"total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return",
"contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self,",
"contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else:",
"(Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class NotificationsView(HomeView):",
"self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset() +",
"u\"Argümanınız silinecek durumda değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name =",
"self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise, user=self.request.user) return redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class",
"class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention =",
"= \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return",
"form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises return",
"= ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user)",
"if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı",
"return '?offset=%(offset)s' % { \"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False)",
"json from datetime import timedelta from markdown2 import markdown from django.contrib import messages",
"self.get_object() contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView):",
"get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name",
"PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def",
"= delete def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class ReportView(CreateView): form_class = ReportForm template_name",
"return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(), **kwargs) class PremiseCreationView(CreateView): template_name",
"= self.get_contentions() if self.request.user.is_authenticated(): notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications",
"get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention =",
"**kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self):",
"slug): contention = self.get_object() if contention.premises.exists(): contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına",
"def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention",
"def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView, self).get_context_data( content=content, **kwargs) class TosView(TemplateView):",
"reverse from django.utils import timezone from django.db.models import Max from django.utils.timezone import now",
"def get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") == \"list\"",
"import Post from premises.utils import int_or_zero from premises.models import Contention, Premise from premises.forms",
"get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user }",
"get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True)",
"return redirect(contention) class ArgumentUnpublishView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention",
"from django.utils.timezone import now from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect",
"get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\") if parent_pk: return get_object_or_404(Premise,",
"premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model",
"[:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def",
"premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises",
"self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save()",
"contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk = self.kwargs.get(\"pk\")",
".all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\" tab_class =",
"self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\"",
"get_queryset(self): contentions = Contention.objects.all() if self.request.user.is_superuser: return contentions return contentions.filter(user=self.request.user) def form_valid(self, form):",
"is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\"",
"from django.utils import timezone from django.db.models import Max from django.utils.timezone import now from",
"\"children\": self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None): children = [{ \"pk\":",
"return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def",
"from datetime import timedelta from markdown2 import markdown from django.contrib import messages from",
"return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40]",
"get_object_or_404(Premise, pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def",
"notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\"))",
"get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter':",
"self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return",
"\"\" def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\":",
"return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week = now()",
"# -*- coding:utf-8 -*- import json from datetime import timedelta from markdown2 import",
"\"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None): children =",
"request, slug): contention = self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete()",
"self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView, self).form_valid(form) form.instance.update_sibling_counts() return response class ArgumentUpdateView(UpdateView):",
"blog.models import Post from premises.utils import int_or_zero from premises.models import Contention, Premise from",
"user, parent=premise) if premise.published_children().exists() else []) } for premise in contention.published_premises(parent)] return children",
"\"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs)",
"form): contention = self.get_contention() premise = self.get_premise() form.instance.contention = contention form.instance.premise = premise",
"Contention def get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\") ==",
"return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords') or \"\" def",
"% { \"offset\": offset } def get_unread_notifications(self): return (self.request.user .notifications .filter(is_read=False) [:5]) def",
"def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total = self.get_contentions(paginate=False).count() return total",
"= Premise.objects.all() else: premises = Premise.objects.filter(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args,",
"UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification'))",
"get_premises(self, contention, user, parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text",
"notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class,",
"= contention form.instance.parent = self.get_parent() form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts()",
"return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return",
"contention = self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post",
"değil.\") return redirect(contention) delete = post class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class =",
"premise in contention.published_premises(parent)] return children def user_can_report(self, premise, user): if user.is_authenticated() and user",
"contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children') .filter(date_modification__gte=last_week)) if paginate: return contentions[self.get_offset():self.get_limit()] return contentions",
"premise, user): if user.is_authenticated() and user != premise.user: return not premise.reported_by(user) return False",
"django.utils.timezone import now from django.http import HttpResponse from django.shortcuts import get_object_or_404, redirect from",
"= Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention,",
"= notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention",
"def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise",
"contention, user): return { \"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\":",
"almadan önce en az 1 \" u\"önerme ekleyin.\") return redirect(contention) class ArgumentUnpublishView(DetailView): def",
"contention.is_published = True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan",
"get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form):",
"notifications_qs = self.get_unread_notifications() notifications = list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView,",
"(self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications",
"get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions = (Contention .objects .annotate(num_children=Count('premises')) .order_by('-num_children')",
"from django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic import DetailView,",
"= \"tos.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs)",
"premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise =",
"= Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise = self.get_premise()",
"not contention.premises.exists(): contention.is_published = False contention.save() return redirect(contention) post = delete def get_contention(self):",
"= None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications, has_next_page=self.has_next_page(), announcements=self.get_announcements(), contentions=contentions, **kwargs) def",
"\"premises/contention_detail.html\" model = Contention def get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\"",
"return redirect(self.get_contention()) post = delete class PremiseDeleteView(View): def get_premise(self): if self.request.user.is_staff: premises =",
"contention, user, parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if",
"def get_context_data(self, **kwargs): return super(SearchView, self).get_context_data( keywords=self.get_keywords(), **kwargs ) def get_keywords(self): return self.request.GET.get('keywords')",
"self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self,",
"= Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class",
"import messages from django.core.urlresolvers import reverse from django.utils import timezone from django.db.models import",
"= list(notifications_qs) self.mark_as_read(notifications_qs) else: notifications = None return super(HomeView, self).get_context_data( next_page_url=self.get_next_page_url(), tab_class=self.tab_class, notifications=notifications,",
"self.get_limit()]) return contentions class NotificationsView(HomeView): template_name = \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs =",
"def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug'])",
"= self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset = self.get_offset()",
"Contention def render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user),",
"template_name = \"premises/new_contention.html\" form_class = ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address",
"(Contention .objects .filter(title__icontains=keywords)) if paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class",
"PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all() if",
"super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user =",
"django.core.urlresolvers import reverse from django.utils import timezone from django.db.models import Max from django.utils.timezone",
"notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request, u\"Argümanınız silinecek",
"# remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id) contention.delete() messages.info(request, u\"Argümanınız silindi.\") return redirect(\"home\") else: messages.info(request,",
"slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise':",
"self.request.GET.get(\"view\") == \"list\" else \"tree-view\") edit_mode = ( self.request.user.is_superuser or self.request.user.is_staff or contention.user",
"yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce en az 1 \" u\"önerme",
"def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self): return self.get_offset() + self.paginate_by def has_next_page(self): total",
"PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion",
"total = self.get_contentions(paginate=False).count() return total > (self.get_offset() + self.paginate_by) def get_next_page_url(self): offset =",
"return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if check_content_deletion(contention): # remove",
"user_can_report(self, premise, user): if user.is_authenticated() and user != premise.user: return not premise.reported_by(user) return",
"parent=None): children = [{ \"pk\": premise.pk, \"name\": premise.text, \"parent\": parent.text if parent else",
"contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user)",
"get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if check_content_deletion(contention): #",
"from premises.models import Contention, Premise from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from",
"timezone from django.db.models import Max from django.utils.timezone import now from django.http import HttpResponse",
"self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class = 'search' def get_context_data(self, **kwargs): return super(SearchView,",
"def user_can_report(self, premise, user): if user.is_authenticated() and user != premise.user: return not premise.reported_by(user)",
"contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True): last_week = now() -",
"paginate_by = 20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs =",
"self.request.user } def form_valid(self, form): contention = self.get_contention() premise = self.get_premise() form.instance.contention =",
"if paginate: result = result[self.get_offset():self.get_limit()] return result class NewsView(HomeView): tab_class = \"news\" def",
"contention = self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent = self.get_parent() form.instance.is_approved",
"yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request,",
"contentions = (Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions",
"announcements=self.get_announcements(), contentions=contentions, **kwargs) def get_announcements(self): return Post.objects.filter(is_announcement=True) def get_offset(self): return int_or_zero(self.request.GET.get(\"offset\")) def get_limit(self):",
"get_queryset(self): premises = Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form):",
"\"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions = self.get_contentions() if self.request.user.is_authenticated(): notifications_qs",
"from django.db.models import Max from django.utils.timezone import now from django.http import HttpResponse from",
"Contention.objects.filter( is_published=True) if paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class =",
"form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(), parent=self.get_parent(), **kwargs) def",
"return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise = self.get_premise()",
"DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView from django.db.models import Count from",
"ArgumentCreationForm def form_valid(self, form): form.instance.user = self.request.user form.instance.ip_address = self.request.META['REMOTE_ADDR'] response = super(ArgumentCreationView,",
"contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self):",
"= timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_parent(self): parent_pk",
"contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self, paginate=True):",
"from premises.forms import (ArgumentCreationForm, PremiseCreationForm, PremiseEditForm, ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy,",
"= (Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()]) return contentions class",
"redirect(self.get_contention()) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs):",
"from newsfeed.models import Entry class ContentionDetailView(DetailView): template_name = \"premises/contention_detail.html\" model = Contention def",
"= Contention def get_context_data(self, **kwargs): contention = self.get_object() view = (\"list-view\" if self.request.GET.get(\"view\")",
"= super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def get_context_data(self, **kwargs): return super(PremiseEditView, self).get_context_data( #contention=self.get_contention(),",
"form.instance.is_approved = True form.instance.ip_address = self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else:",
"premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published = False contention.save() return",
"**kwargs) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug']) def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self):",
"ReportForm template_name = \"premises/report.html\" def get_context_data(self, **kwargs): return super(ReportView, self).get_context_data( premise=self.get_premise(), **kwargs) def",
"paginate: contentions = contentions[self.get_offset():self.get_limit()] return contentions class ControversialArgumentsView(HomeView): tab_class = \"controversial\" def get_contentions(self,",
"def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs):",
"contentions return contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form)",
"def form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return response",
"get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise",
"slug=self.kwargs['slug']) class PremiseUnsupportView(PremiseSupportView): def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return",
"ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions = Contention.objects.all() if",
"'premise': self.get_premise(), 'reporter': self.request.user } def form_valid(self, form): contention = self.get_contention() premise =",
"premise.reported_by(user) return False def is_singular(self, contention): result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return",
"notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return super(HomeView, self).get_context_data( notifications=notifications, **kwargs) class SearchView(HomeView): tab_class =",
"class AboutView(TemplateView): template_name = \"about.html\" def get_context_data(self, **kwargs): content = markdown(render_to_string(\"about.md\")) return super(AboutView,",
"contentions[self.get_offset():self.get_limit()] return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions =",
"contention.is_published = False contention.save() messages.info(request, u\"Argüman yayından kaldırıldı.\") return redirect(contention) class ArgumentDeleteView(DetailView): def",
"or self.request.user.is_staff or contention.user == self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs)",
"\"controversial\" def get_contentions(self, paginate=True): last_week = now() - timedelta(days=3) contentions = (Contention .objects",
"premise.premise_class(), \"children\": (self.get_premises(contention, user, parent=premise) if premise.published_children().exists() else []) } for premise in",
"Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if check_content_deletion(contention): # remove notification",
"\"name\": contention.title, \"parent\": None, \"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\":",
"get_object_or_404(premises, pk=self.kwargs['pk']) def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.delete() premise.update_sibling_counts() contention",
"= self.get_premise() premise.delete() premise.update_sibling_counts() contention = self.get_contention() if not contention.premises.exists(): contention.is_published = False",
"\"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention, user, parent=None):",
"self.get_keywords() } def get_contentions(self, paginate=True): keywords = self.request.GET.get('keywords') if not keywords or len(keywords)",
"= True contention.save() messages.info(request, u\"Argüman yayına alındı.\") else: messages.info(request, u\"Argümanı yayına almadan önce",
"render_to_response(self, context, **response_kwargs): contention = self.get_object(self.get_queryset()) return HttpResponse(json.dumps({ \"nodes\": self.build_tree(contention, self.request.user), }), content_type=\"application/json\")",
"added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self):",
"def form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument = contention form.instance.parent",
"return contentions class UpdatedArgumentsView(HomeView): tab_class = \"updated\" def get_contentions(self, paginate=True): contentions = (Contention",
"get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if paginate: contentions = contentions[self.get_offset():self.get_limit()]",
".filter(id__in=pks) .update(is_read=True)) def get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if paginate: contentions",
"**kwargs): content = markdown(render_to_string(\"tos.md\")) return super(TosView, self).get_context_data( content=content, **kwargs) class ArgumentCreationView(CreateView): template_name =",
"return (self.request.user .notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user",
"\"pk\": contention.pk, \"owner\": contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def",
"Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user)",
"def get_premise(self): return get_object_or_404(Premise, pk=self.kwargs['pk']) def get_initial(self): return { 'contention': self.get_contention(), 'premise': self.get_premise(),",
"PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args,",
"ReportForm) from premises.signals import (added_premise_for_premise, added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from",
"tab_class = \"updated\" def get_contentions(self, paginate=True): contentions = (Contention .objects .filter(is_published=True) .order_by('-date_modification')) if",
"template_name = \"premises/new_premise.html\" form_class = PremiseCreationForm def get_context_data(self, **kwargs): return super(PremiseCreationView, self).get_context_data( contention=self.get_contention(),",
"pk=parent_pk) class PremiseSupportView(View): def get_premise(self): premises = Premise.objects.exclude(user=self.request.user) return get_object_or_404(premises, pk=self.kwargs['pk']) def post(self,",
"self).get_context_data( content=content, **kwargs) class TosView(TemplateView): template_name = \"tos.html\" def get_context_data(self, **kwargs): content =",
"return premises return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return",
"response class ArgumentUpdateView(UpdateView): template_name = \"premises/edit_contention.html\" form_class = ArgumentCreationForm def get_queryset(self): contentions =",
"contention.owner, \"sources\": contention.sources, \"is_singular\": self.is_singular(contention), \"children\": self.get_premises(contention, user) } def get_premises(self, contention, user,",
"post(self, request, slug): contention = self.get_object() if check_content_deletion(contention): # remove notification Entry.objects.delete(contention.get_newsfeed_type(), contention.id)",
"parent.text if parent else None, \"reportable_by_authenticated_user\": self.user_can_report(premise, user), \"report_count\": premise.reports.count(), \"user\": { \"id\":",
"= \"notifications.html\" def get_context_data(self, **kwargs): notifications_qs = self.request.user.notifications.all()[:40] notifications = list(notifications_qs) self.mark_as_read(notifications_qs) return",
"contentions.filter(user=self.request.user) def form_valid(self, form): form.instance.user = self.request.user response = super(ArgumentUpdateView, self).form_valid(form) form.instance.update_sibling_counts() return",
"self.request.META['REMOTE_ADDR'] form.save() contention.update_sibling_counts() if form.instance.parent: added_premise_for_premise.send(sender=self, premise=form.instance) else: added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now()",
"added_premise_for_contention.send(sender=self, premise=form.instance) contention.date_modification = timezone.now() contention.save() return redirect(contention) def get_contention(self): return get_object_or_404(Contention, slug=self.kwargs['slug'])",
"\"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources, \"premise_type\":",
"(contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name = \"index.html\"",
"Premise.objects.all() if self.request.user.is_superuser: return premises return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView,",
"get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset(): self.get_limit()])",
"def delete(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post =",
"django.contrib import messages from django.core.urlresolvers import reverse from django.utils import timezone from django.db.models",
"class PremiseEditView(UpdateView): template_name = \"premises/edit_premise.html\" form_class = PremiseEditForm def get_queryset(self): premises = Premise.objects.all()",
"import render_to_string from django.views.generic import DetailView, TemplateView, CreateView, View from django.views.generic.edit import UpdateView",
"def get_queryset(self): return Contention.objects.filter(user=self.request.user) def post(self, request, slug): contention = self.get_object() if contention.premises.exists():",
"get_object_or_404(premises, pk=self.kwargs['pk']) def post(self, request, *args, **kwargs): premise = self.get_premise() premise.supporters.add(self.request.user) supported_a_premise.send(sender=self, premise=premise,",
"self.request.user) return super(ContentionDetailView, self).get_context_data( view=view, path=contention.get_absolute_url(), edit_mode=edit_mode, **kwargs) class ContentionJsonView(DetailView): model = Contention",
"def get_contentions(self, paginate=True): contentions = (Contention .objects .featured()) if paginate: contentions = (contentions[self.get_offset():",
"\"index.html\" tab_class = \"featured\" paginate_by = 20 def get_context_data(self, **kwargs): contentions = self.get_contentions()",
".notifications .filter(is_read=False) [:5]) def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks)",
"def get_next_page_url(self): offset = self.get_offset() + self.paginate_by return '?offset=%(offset)s&keywords=%(keywords)s' % { \"offset\": offset,",
"django.shortcuts import get_object_or_404, redirect from django.template.loader import render_to_string from django.views.generic import DetailView, TemplateView,",
"request, *args, **kwargs): premise = self.get_premise() premise.supporters.remove(self.request.user) return redirect(self.get_contention()) post = delete class",
"premise.reports.count(), \"user\": { \"id\": premise.user.id, \"username\": premise.user.username, \"absolute_url\": reverse(\"auth_profile\", args=[premise.user.username]) }, \"sources\": premise.sources,",
"result = (contention .premises .all() .aggregate(max_sibling=Max('sibling_count'))) return result['max_sibling'] <= 1 class HomeView(TemplateView): template_name",
"def mark_as_read(self, notifications): pks = notifications.values_list(\"id\", flat=True) (self.request.user .notifications .filter(id__in=pks) .update(is_read=True)) def get_contentions(self,",
"parent=self.get_parent(), **kwargs) def form_valid(self, form): contention = self.get_contention() form.instance.user = self.request.user form.instance.argument =",
"added_premise_for_contention, reported_as_fallacy, supported_a_premise) from premises.templatetags.premise_tags import check_content_deletion from newsfeed.models import Entry class ContentionDetailView(DetailView):",
"return premises.filter(user=self.request.user) def form_valid(self, form): response = super(PremiseEditView, self).form_valid(form) form.instance.argument.update_sibling_counts() return response def"
] |
[
"and B. L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor",
"of points in a batch, D is the dimension of the Euclidian space.",
"tf.expand_dims(a, 2) # N x K x 1 x D b_ext = tf.expand_dims(b,",
"b_ext = tf.expand_dims(b, 1) # N x 1 x M x D a2",
"a_ext, axis=3) # N x K x 1 b2 = tf.reduce_sum(b_ext * b_ext,",
"M]: pairwise L-infinity distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a)",
"in a batch, D is the dimension of the euclidian space. sqrt (bool,",
"Defaults to True. Returns: Tensor [N x K x M]: pairwise L2 distance",
"L1 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b =",
"points in a batch, D is the dimension of the euclidian space. sqrt",
"[N x K x M]: pairwise L-infinity distance between each pair of points.",
"L2_square = a2 + b2 - 2 * ab # N x K",
"tf.expand_dims(b, 1) # N x 1 x M x D a2 = tf.reduce_sum(a_ext",
"square root. Defaults to True. Returns: Tensor [N x K x M]: pairwise",
"L2 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b =",
"between all points in A and B. L1 norm is ``sum(|x_i - y_i|)``",
"A and B. L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a",
"R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2",
"= tf.expand_dims(b, 1) # N x 1 x M x D a2 =",
"is the dimension of the Euclidian space. b (Tensor [N x M x",
"dimension of the euclidian space. sqrt (bool, optional): whether take the square root.",
"space R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise",
"points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2 =",
"axis=3) # N x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞",
"# N x 1 x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3)",
"axis=3) # N x 1 x M ab = tf.matmul(a, tf.transpose(b, (0, 2,",
"2 * a_i * b_j a_ext = tf.expand_dims(a, 2) # N x K",
"Tensor [N x K x M]: pairwise L1 distance between each pair of",
"of points in a batch, D is the dimension of the euclidian space.",
"B. L1 norm is ``sum(|x_i - y_i|)`` Args: a (Tensor [N x K",
"# N x K x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) #",
"b, sqrt=True): \"\"\"Compute pairwise L2 distance between all points in A and B.",
"N x 1 x M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) #",
"operations =================== Batch operations on points in D-dimensional Eucledian space R^D. \"\"\" import",
"tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x K x M L2_square =",
"M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) # N x K x",
"is ``sum(|x_i - y_i|)`` Args: a (Tensor [N x K x D]): point",
"= tf.expand_dims(b, 1) # N x 1 x M x D return tf.reduce_max(tf.abs(a_ext",
"N x 1 x M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) #",
"Args: a (Tensor [N x K x D]): point coordinates. N is batch",
"coordinates, N is batch size, M is the number of points in a",
"b = tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 + b_j^2 - 2",
"of the Euclidian space. b (Tensor [N x M x D]): point coordinates,",
"D-dimensional Eucledian space R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True):",
"def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all points in A and",
"distance between all points in A and B. L-infinity is ``max(|x_i - y_i|)``",
"sqrt: return L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise",
"x K x M]: pairwise L1 distance between each pair of points. \"\"\"",
"\"\"\"Compute pairwise L1 distance between all points in A and B. L1 norm",
"K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all points",
"batch size, M is the number of points in a batch, D is",
"b_j)^2 = a_i^2 + b_j^2 - 2 * a_i * b_j a_ext =",
"1 x M x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N",
"= tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x K",
"# N x 1 x M x D a2 = tf.reduce_sum(a_ext * a_ext,",
"B. L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor [N x K x",
"L2 distance between all points in A and B. L2 norm is ``sqrt(sum(|x_i",
"[N x K x D]): point coordinates. N is batch size, K is",
"of the euclidian space. sqrt (bool, optional): whether take the square root. Defaults",
"tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K x M def pairwise_l_inf_distance(a, b):",
"L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor [N x",
"N x K x 1 x D b_ext = tf.expand_dims(b, 1) # N",
"the number of points in a batch, D is the dimension of the",
"``max(|x_i - y_i|)`` Args: a (Tensor [N x K x D]): point coordinates.",
"x M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) # N x K",
"space. Returns: Tensor [N x K x M]: pairwise L1 distance between each",
"number of points in a batch, D is the dimension of the Euclidian",
"``sum(|x_i - y_i|)`` Args: a (Tensor [N x K x D]): point coordinates.",
"* b_j a_ext = tf.expand_dims(a, 2) # N x K x 1 x",
"of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2",
"x 1 x D b_ext = tf.expand_dims(b, 1) # N x 1 x",
"in A and B. L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor [N",
"tf.transpose(b, (0, 2, 1))) # N x K x M L2_square = a2",
"x D]): point coordinates. N is batch size, K is the number of",
"euclidian space. sqrt (bool, optional): whether take the square root. Defaults to True.",
"x M if sqrt: return L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a,",
"(a_i - b_j)^2 = a_i^2 + b_j^2 - 2 * a_i * b_j",
"K x M L2_square = a2 + b2 - 2 * ab #",
"between all points in A and B. L-infinity is ``max(|x_i - y_i|)`` Args:",
"point coordinates. N is batch size, K is the number of points in",
"x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all",
"x 1 x M x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) #",
"def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all points in A and",
"tf.reduce_sum(a_ext * a_ext, axis=3) # N x K x 1 b2 = tf.reduce_sum(b_ext",
"Returns: Tensor [N x K x M]: pairwise L2 distance between each pair",
"L-infinity distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b =",
"and B. L1 norm is ``sum(|x_i - y_i|)`` Args: a (Tensor [N x",
"- y_i|)`` Args: a (Tensor [N x K x D]): point coordinates. N",
"norm is ``sum(|x_i - y_i|)`` Args: a (Tensor [N x K x D]):",
"N x K x M if sqrt: return L2_square ** 0.5 else: return",
"ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x K x M",
"b_ext = tf.expand_dims(b, 1) # N x 1 x M x D return",
"pairwise L2 distance between all points in A and B. L2 norm is",
"M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x K x",
"1 x D b_ext = tf.expand_dims(b, 1) # N x 1 x M",
"1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1 x M",
"norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor [N x K",
"a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x",
"take the square root. Defaults to True. Returns: Tensor [N x K x",
"pairwise L1 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b",
"D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K x M def",
"pairwise L2 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b",
"N is batch size, M is the number of points in a batch,",
"(0, 2, 1))) # N x K x M L2_square = a2 +",
"x M x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N x",
"b): \"\"\"Compute pairwise L∞ distance between all points in A and B. L-infinity",
"N x 1 x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) #",
"A and B. L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor [N x",
"in a batch, D is the dimension of the Euclidian space. b (Tensor",
"x 1 x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N",
"\"\"\"Compute pairwise L2 distance between all points in A and B. L2 norm",
"- b_j)^2 = a_i^2 + b_j^2 - 2 * a_i * b_j a_ext",
"D b_ext = tf.expand_dims(b, 1) # N x 1 x M x D",
"points in a batch, D is the dimension of the euclidian space. Returns:",
"b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1 x M ab",
"points in a batch, D is the dimension of the Euclidian space. b",
"in D-dimensional Eucledian space R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a, b,",
"\"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N",
"the dimension of the Euclidian space. b (Tensor [N x M x D]):",
"= tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 + b_j^2 - 2 *",
"# (a_i - b_j)^2 = a_i^2 + b_j^2 - 2 * a_i *",
"b2 - 2 * ab # N x K x M if sqrt:",
"[N x K x M]: pairwise L2 distance between each pair of points.",
"M x D]): point coordinates, N is batch size, M is the number",
"(Tensor [N x M x D]): point coordinates, N is batch size, M",
"and B. L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor [N x K",
"return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K x M def pairwise_l_inf_distance(a,",
"all points in A and B. L-infinity is ``max(|x_i - y_i|)`` Args: a",
"Tensor [N x K x M]: pairwise L-infinity distance between each pair of",
"N x 1 x M x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3)",
"- y_i| ^ 2))`` Args: a (Tensor [N x K x D]): point",
"= tf.reduce_sum(a_ext * a_ext, axis=3) # N x K x 1 b2 =",
"L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all points in A",
"M x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N x K",
"points in A and B. L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor",
"= tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 + b_j^2",
"1) # N x 1 x M x D a2 = tf.reduce_sum(a_ext *",
"between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext",
"coordinates. N is batch size, K is the number of points in a",
"K x 1 x D b_ext = tf.expand_dims(b, 1) # N x 1",
"[N x M x D]): point coordinates, N is batch size, M is",
"b_j^2 - 2 * a_i * b_j a_ext = tf.expand_dims(a, 2) # N",
"[N x K x M]: pairwise L1 distance between each pair of points.",
"the euclidian space. Returns: Tensor [N x K x M]: pairwise L1 distance",
"size, M is the number of points in a batch, D is the",
"K is the number of points in a batch, D is the dimension",
"x M]: pairwise L-infinity distance between each pair of points. \"\"\" a =",
"x K x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N x",
"batch, D is the dimension of the euclidian space. sqrt (bool, optional): whether",
"M if sqrt: return L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a, b):",
"2, 1))) # N x K x M L2_square = a2 + b2",
"N x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between",
"L1 distance between all points in A and B. L1 norm is ``sum(|x_i",
"is batch size, M is the number of points in a batch, D",
"+ b2 - 2 * ab # N x K x M if",
"= tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x K x M L2_square",
"x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all points in",
"1) # N x 1 x M x D return tf.reduce_max(tf.abs(a_ext - b_ext),",
"M]: pairwise L1 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a)",
"def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between all points in A",
"of the euclidian space. Returns: Tensor [N x K x M]: pairwise L-infinity",
"number of points in a batch, D is the dimension of the euclidian",
"x 1 x M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) # N",
"x K x 1 x D b_ext = tf.expand_dims(b, 1) # N x",
"a_i^2 + b_j^2 - 2 * a_i * b_j a_ext = tf.expand_dims(a, 2)",
"the Euclidian space. b (Tensor [N x M x D]): point coordinates, N",
"L-infinity is ``max(|x_i - y_i|)`` Args: a (Tensor [N x K x D]):",
"Returns: Tensor [N x K x M]: pairwise L-infinity distance between each pair",
"x M]: pairwise L2 distance between each pair of points. \"\"\" a =",
"is the number of points in a batch, D is the dimension of",
"# N x K x M L2_square = a2 + b2 - 2",
"pairwise L1 distance between all points in A and B. L1 norm is",
"x K x D]): point coordinates. N is batch size, K is the",
"K x M]: pairwise L2 distance between each pair of points. \"\"\" a",
"dimension of the euclidian space. Returns: Tensor [N x K x M]: pairwise",
"- 2 * a_i * b_j a_ext = tf.expand_dims(a, 2) # N x",
"pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all points in A and B.",
"1 x M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) # N x",
"N x K x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N",
"0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all",
"\"\"\"Point operations =================== Batch operations on points in D-dimensional Eucledian space R^D. \"\"\"",
"a batch, D is the dimension of the euclidian space. sqrt (bool, optional):",
"euclidian space. Returns: Tensor [N x K x M]: pairwise L-infinity distance between",
"\"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2",
"x K x M if sqrt: return L2_square ** 0.5 else: return L2_square",
"D is the dimension of the Euclidian space. b (Tensor [N x M",
"# N x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance",
"operations on points in D-dimensional Eucledian space R^D. \"\"\" import tensorflow as tf",
"N x K x M L2_square = a2 + b2 - 2 *",
"b): \"\"\"Compute pairwise L1 distance between all points in A and B. L1",
"* ab # N x K x M if sqrt: return L2_square **",
"# N x 1 x M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1)))",
"D is the dimension of the euclidian space. Returns: Tensor [N x K",
"sqrt (bool, optional): whether take the square root. Defaults to True. Returns: Tensor",
"= a_i^2 + b_j^2 - 2 * a_i * b_j a_ext = tf.expand_dims(a,",
"if sqrt: return L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute",
"2 * ab # N x K x M if sqrt: return L2_square",
"tf.expand_dims(b, 1) # N x 1 x M x D return tf.reduce_sum(tf.abs(a_ext -",
"(bool, optional): whether take the square root. Defaults to True. Returns: Tensor [N",
"K x M if sqrt: return L2_square ** 0.5 else: return L2_square def",
"A and B. L1 norm is ``sum(|x_i - y_i|)`` Args: a (Tensor [N",
"a_i * b_j a_ext = tf.expand_dims(a, 2) # N x K x 1",
"x K x M]: pairwise L-infinity distance between each pair of points. \"\"\"",
"tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x K x 1 x D",
"1) # N x 1 x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext),",
"pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all points in A and B.",
"points in D-dimensional Eucledian space R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a,",
"is batch size, K is the number of points in a batch, D",
"optional): whether take the square root. Defaults to True. Returns: Tensor [N x",
"* a_ext, axis=3) # N x K x 1 b2 = tf.reduce_sum(b_ext *",
"2))`` Args: a (Tensor [N x K x D]): point coordinates. N is",
"return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all points in",
"a (Tensor [N x K x D]): point coordinates. N is batch size,",
"L∞ distance between all points in A and B. L-infinity is ``max(|x_i -",
"= tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x K x 1 x",
"= tf.expand_dims(b, 1) # N x 1 x M x D return tf.reduce_sum(tf.abs(a_ext",
"axis=3) # N x K x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3)",
"between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) #",
"a_ext = tf.expand_dims(a, 2) # N x K x 1 x D b_ext",
"D]): point coordinates, N is batch size, M is the number of points",
"all points in A and B. L2 norm is ``sqrt(sum(|x_i - y_i| ^",
"tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1 x M ab = tf.matmul(a,",
"b_ext), axis=3) # N x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise",
"x D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N x K x",
"in a batch, D is the dimension of the euclidian space. Returns: Tensor",
"K x M]: pairwise L1 distance between each pair of points. \"\"\" a",
"tf.expand_dims(b, 1) # N x 1 x M x D return tf.reduce_max(tf.abs(a_ext -",
"euclidian space. Returns: Tensor [N x K x M]: pairwise L1 distance between",
"distance between all points in A and B. L1 norm is ``sum(|x_i -",
"distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b)",
"batch size, K is the number of points in a batch, D is",
"N is batch size, K is the number of points in a batch,",
"dimension of the Euclidian space. b (Tensor [N x M x D]): point",
"- 2 * ab # N x K x M if sqrt: return",
"y_i|)`` Args: a (Tensor [N x K x D]): point coordinates. N is",
"tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 + b_j^2 -",
"# N x K x M if sqrt: return L2_square ** 0.5 else:",
"of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2)",
"1))) # N x K x M L2_square = a2 + b2 -",
"b_ext, axis=3) # N x 1 x M ab = tf.matmul(a, tf.transpose(b, (0,",
"= tf.expand_dims(a, 2) # N x K x 1 x D b_ext =",
"K x M]: pairwise L-infinity distance between each pair of points. \"\"\" a",
"each pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i",
"=================== Batch operations on points in D-dimensional Eucledian space R^D. \"\"\" import tensorflow",
"\"\"\" import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance",
"Tensor [N x K x M]: pairwise L2 distance between each pair of",
"** 0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between",
"x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K x M",
"+ b_j^2 - 2 * a_i * b_j a_ext = tf.expand_dims(a, 2) #",
"a2 + b2 - 2 * ab # N x K x M",
"tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between all",
"= a2 + b2 - 2 * ab # N x K x",
"pairwise L∞ distance between all points in A and B. L-infinity is ``max(|x_i",
"tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between all points in",
"pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i -",
"Eucledian space R^D. \"\"\" import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute",
"K x D]): point coordinates. N is batch size, K is the number",
"M def pairwise_l_inf_distance(a, b): \"\"\"Compute pairwise L∞ distance between all points in A",
"all points in A and B. L1 norm is ``sum(|x_i - y_i|)`` Args:",
"points in A and B. L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))``",
"size, K is the number of points in a batch, D is the",
"(Tensor [N x K x D]): point coordinates. N is batch size, K",
"in A and B. L1 norm is ``sum(|x_i - y_i|)`` Args: a (Tensor",
"batch, D is the dimension of the euclidian space. Returns: Tensor [N x",
"whether take the square root. Defaults to True. Returns: Tensor [N x K",
"* a_i * b_j a_ext = tf.expand_dims(a, 2) # N x K x",
"Batch operations on points in D-dimensional Eucledian space R^D. \"\"\" import tensorflow as",
"x M x D]): point coordinates, N is batch size, M is the",
"sqrt=True): \"\"\"Compute pairwise L2 distance between all points in A and B. L2",
"is the dimension of the euclidian space. Returns: Tensor [N x K x",
"M]: pairwise L2 distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a)",
"* b_ext, axis=3) # N x 1 x M ab = tf.matmul(a, tf.transpose(b,",
"space. sqrt (bool, optional): whether take the square root. Defaults to True. Returns:",
"B. L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor [N",
"is ``max(|x_i - y_i|)`` Args: a (Tensor [N x K x D]): point",
"the dimension of the euclidian space. sqrt (bool, optional): whether take the square",
"K x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1",
"b_j a_ext = tf.expand_dims(a, 2) # N x K x 1 x D",
"ab # N x K x M if sqrt: return L2_square ** 0.5",
"tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x K x",
"True. Returns: Tensor [N x K x M]: pairwise L2 distance between each",
"L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance",
"Returns: Tensor [N x K x M]: pairwise L1 distance between each pair",
"on points in D-dimensional Eucledian space R^D. \"\"\" import tensorflow as tf def",
"D is the dimension of the euclidian space. sqrt (bool, optional): whether take",
"1 x M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x",
"a batch, D is the dimension of the euclidian space. Returns: Tensor [N",
"x 1 b2 = tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1 x",
"# N x 1 x M x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3)",
"2) # N x K x 1 x D b_ext = tf.expand_dims(b, 1)",
"x D]): point coordinates, N is batch size, M is the number of",
"points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) #",
"a batch, D is the dimension of the Euclidian space. b (Tensor [N",
"x M L2_square = a2 + b2 - 2 * ab # N",
"y_i| ^ 2))`` Args: a (Tensor [N x K x D]): point coordinates.",
"M is the number of points in a batch, D is the dimension",
"x M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N x K",
"space. b (Tensor [N x M x D]): point coordinates, N is batch",
"x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K",
"is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor [N x K x",
"# N x K x 1 x D b_ext = tf.expand_dims(b, 1) #",
"1 x M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x",
"tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 + b_j^2 - 2 * a_i",
"the euclidian space. Returns: Tensor [N x K x M]: pairwise L-infinity distance",
"<reponame>pshved/tensorbank \"\"\"Point operations =================== Batch operations on points in D-dimensional Eucledian space R^D.",
"as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between all points",
"``sqrt(sum(|x_i - y_i| ^ 2))`` Args: a (Tensor [N x K x D]):",
"between all points in A and B. L2 norm is ``sqrt(sum(|x_i - y_i|",
"return L2_square ** 0.5 else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1",
"else: return L2_square def pairwise_l1_distance(a, b): \"\"\"Compute pairwise L1 distance between all points",
"the dimension of the euclidian space. Returns: Tensor [N x K x M]:",
"^ 2))`` Args: a (Tensor [N x K x D]): point coordinates. N",
"to True. Returns: Tensor [N x K x M]: pairwise L2 distance between",
"x M]: pairwise L1 distance between each pair of points. \"\"\" a =",
"Euclidian space. b (Tensor [N x M x D]): point coordinates, N is",
"= tf.reduce_sum(b_ext * b_ext, axis=3) # N x 1 x M ab =",
"each pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext =",
"is the dimension of the euclidian space. sqrt (bool, optional): whether take the",
"b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a, 2) # N x K x 1",
"space. Returns: Tensor [N x K x M]: pairwise L-infinity distance between each",
"the square root. Defaults to True. Returns: Tensor [N x K x M]:",
"M L2_square = a2 + b2 - 2 * ab # N x",
"b (Tensor [N x M x D]): point coordinates, N is batch size,",
"D]): point coordinates. N is batch size, K is the number of points",
"x K x M]: pairwise L2 distance between each pair of points. \"\"\"",
"a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) # (a_i - b_j)^2 = a_i^2 +",
"pair of points. \"\"\" a = tf.convert_to_tensor(a) b = tf.convert_to_tensor(b) a_ext = tf.expand_dims(a,",
"\"\"\"Compute pairwise L∞ distance between all points in A and B. L-infinity is",
"the euclidian space. sqrt (bool, optional): whether take the square root. Defaults to",
"pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between all points in A and",
"a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N x K x 1 b2",
"batch, D is the dimension of the Euclidian space. b (Tensor [N x",
"root. Defaults to True. Returns: Tensor [N x K x M]: pairwise L2",
"in A and B. L2 norm is ``sqrt(sum(|x_i - y_i| ^ 2))`` Args:",
"L1 norm is ``sum(|x_i - y_i|)`` Args: a (Tensor [N x K x",
"x K x M L2_square = a2 + b2 - 2 * ab",
"points in A and B. L1 norm is ``sum(|x_i - y_i|)`` Args: a",
"distance between all points in A and B. L2 norm is ``sqrt(sum(|x_i -",
"import tensorflow as tf def pairwise_l2_distance(a, b, sqrt=True): \"\"\"Compute pairwise L2 distance between",
"x D return tf.reduce_max(tf.abs(a_ext - b_ext), axis=3) # N x K x M",
"D a2 = tf.reduce_sum(a_ext * a_ext, axis=3) # N x K x 1",
"pairwise L-infinity distance between each pair of points. \"\"\" a = tf.convert_to_tensor(a) b",
"- b_ext), axis=3) # N x K x M def pairwise_l_inf_distance(a, b): \"\"\"Compute",
"x 1 x M ab = tf.matmul(a, tf.transpose(b, (0, 2, 1))) # N",
"x D b_ext = tf.expand_dims(b, 1) # N x 1 x M x",
"point coordinates, N is batch size, M is the number of points in",
"of the euclidian space. Returns: Tensor [N x K x M]: pairwise L1",
"M x D return tf.reduce_sum(tf.abs(a_ext - b_ext), axis=3) # N x K x"
] |
[
"from typing import Dict, Any RawSchema = Dict[str, Any] Field = Dict[str, Any]"
] |
[
"\"\"\"Password Change Confirmation\"\"\" FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following link to",
"the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will be expired",
"link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will be expired in {minutes}",
"= \"\"\"hello {user},\\n Please click the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n",
"{user},\\n Please click the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link",
"FORGET_MY_PASSWORD_SUBJECT = \"\"\"Password Change Confirmation\"\"\" FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following",
"Please click the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will",
"click the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will be",
"Change Confirmation\"\"\" FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following link to change",
"FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following link to change your password:\\n",
"= \"\"\"Password Change Confirmation\"\"\" FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following link",
"following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will be expired in",
"\"\"\"hello {user},\\n Please click the following link to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This",
"Confirmation\"\"\" FORGET_MY_PASSWORD_CONTENT = \"\"\"hello {user},\\n Please click the following link to change your",
"to change your password:\\n http://jinchispace.com:5001/newpass?token={token}\\n This link will be expired in {minutes} minutes.\"\"\""
] |
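A minimal sketch of rendering the template; the values are illustrative, while the {user}, {token}, and {minutes} placeholders come from the template above.

body = FORGET_MY_PASSWORD_CONTENT.format(
    user="alice",    # illustrative recipient name
    token="abc123",  # illustrative reset token
    minutes=30,      # illustrative expiry window
)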
[
"a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\",",
"type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to specify if server should",
"help=\"Do not cache objects which expire in less than this time (in seconds)\",",
") @command.command def replay(args): \"\"\" Starts a replay environment for the given replay",
"critical request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a replay environment",
"\"\"\" Starts a replay environment for the given replay directory, including setting up",
"policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time,",
"the requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None",
"\"--extract_critical_requests\", help=\"true or false to specify if server should inject critical request extractor\",",
"than this time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false",
"as log from blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\", help=\"The directory",
"push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of",
"requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path",
"viewing and manipulating the training manifest \"\"\" import json import time import os",
"server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which expire in less than",
"@command.argument(\"--policy\", help=\"The file path to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location",
"@command.argument( \"--cache_time\", help=\"Do not cache objects which expire in less than this time",
") @command.argument( \"--extract_critical_requests\", help=\"true or false to specify if server should inject critical",
"= os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path else None",
"import command @command.argument(\"replay_dir\", help=\"The directory containing the save files captured by mahimahi\") @command.argument(\"--policy\",",
"None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict =",
"action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a replay environment for the given",
"for viewing and manipulating the training manifest \"\"\" import json import time import",
"def replay(args): \"\"\" Starts a replay environment for the given replay directory, including",
"for the given replay directory, including setting up interfaces, running a DNS server,",
"interfaces, running a DNS server, and configuring and running an nginx server to",
"save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted push",
"blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\", help=\"The directory containing the save",
"nginx server to serve the requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path)",
"Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time, extract_critical_requests=args.extract_critical_requests, ): while True: time.sleep(86400)",
"server should inject critical request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts",
"policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy,",
"the training manifest \"\"\" import json import time import os from blaze.action import",
"serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\")",
"or false to specify if server should inject critical request extractor\", action=\"store_true\", )",
"up interfaces, running a DNS server, and configuring and running an nginx server",
"directory containing the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to",
"else None key_path = os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading policy\",",
"\"\"\" Implements the commands for viewing and manipulating the training manifest \"\"\" import",
"None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path",
"import time import os from blaze.action import Policy from blaze.logger import logger as",
"time import os from blaze.action import Policy from blaze.logger import logger as log",
"command @command.argument(\"replay_dir\", help=\"The directory containing the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The",
"if server should inject critical request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\"",
"from blaze.action import Policy from blaze.logger import logger as log from blaze.mahimahi.server import",
"\"--cache_time\", help=\"Do not cache objects which expire in less than this time (in",
"which expire in less than this time (in seconds)\", type=int, default=None ) @command.argument(",
"@command.argument( \"--extract_critical_requests\", help=\"true or false to specify if server should inject critical request",
"to serve the requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path",
"a DNS server, and configuring and running an nginx server to serve the",
"\"r\") as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path,",
"import logger as log from blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\",",
"less than this time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or",
"json import time import os from blaze.action import Policy from blaze.logger import logger",
"logger as log from blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\", help=\"The",
"false to specify if server should inject critical request extractor\", action=\"store_true\", ) @command.command",
"the commands for viewing and manipulating the training manifest \"\"\" import json import",
"import os from blaze.action import Policy from blaze.logger import logger as log from",
"in less than this time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true",
"= json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time, extract_critical_requests=args.extract_critical_requests,",
"path to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server",
"open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir,",
"policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time, extract_critical_requests=args.extract_critical_requests, ): while",
"request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a replay environment for",
"replay directory, including setting up interfaces, running a DNS server, and configuring and",
"import json import time import os from blaze.action import Policy from blaze.logger import",
"help=\"true or false to specify if server should inject critical request extractor\", action=\"store_true\",",
"from . import command @command.argument(\"replay_dir\", help=\"The directory containing the save files captured by",
"@command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument(",
"by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted push policy to serve\")",
"cache objects which expire in less than this time (in seconds)\", type=int, default=None",
"cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path else",
"with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server(",
"to specify if server should inject critical request extractor\", action=\"store_true\", ) @command.command def",
"None key_path = os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy)",
"log from blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\", help=\"The directory containing",
"if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file)",
"this time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to",
"the given replay directory, including setting up interfaces, running a DNS server, and",
"os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path else None if",
"server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do not cache",
"log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy =",
"Implements the commands for viewing and manipulating the training manifest \"\"\" import json",
"and configuring and running an nginx server to serve the requests \"\"\" policy",
"manifest \"\"\" import json import time import os from blaze.action import Policy from",
"to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\")",
"= None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path) if",
"policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict)",
"help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which expire",
"if args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path else None if args.policy:",
"@command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which",
"mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\",",
"help=\"The directory containing the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path",
"configuring and running an nginx server to serve the requests \"\"\" policy =",
"server to serve the requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if",
"should inject critical request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a",
"and running an nginx server to serve the requests \"\"\" policy = None",
"file path to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the",
"replay environment for the given replay directory, including setting up interfaces, running a",
"= os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy,",
"push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with",
"args.cert_path else None key_path = os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading",
"policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path = os.path.abspath(args.key_path)",
"expire in less than this time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\",",
"not cache objects which expire in less than this time (in seconds)\", type=int,",
"(in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to specify if",
"a replay environment for the given replay directory, including setting up interfaces, running",
"import start_server from . import command @command.argument(\"replay_dir\", help=\"The directory containing the save files",
"given replay directory, including setting up interfaces, running a DNS server, and configuring",
"manipulating the training manifest \"\"\" import json import time import os from blaze.action",
"commands for viewing and manipulating the training manifest \"\"\" import json import time",
"start_server from . import command @command.argument(\"replay_dir\", help=\"The directory containing the save files captured",
"from blaze.mahimahi.server import start_server from . import command @command.argument(\"replay_dir\", help=\"The directory containing the",
"environment for the given replay directory, including setting up interfaces, running a DNS",
"key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which expire in less than this",
"server, and configuring and running an nginx server to serve the requests \"\"\"",
"Policy from blaze.logger import logger as log from blaze.mahimahi.server import start_server from .",
"the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do not",
"os from blaze.action import Policy from blaze.logger import logger as log from blaze.mahimahi.server",
"extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a replay environment for the",
"from blaze.logger import logger as log from blaze.mahimahi.server import start_server from . import",
"JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location",
". import command @command.argument(\"replay_dir\", help=\"The directory containing the save files captured by mahimahi\")",
"\"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path else None key_path =",
"args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict = json.load(policy_file) policy",
"an nginx server to serve the requests \"\"\" policy = None cert_path =",
"help=\"The file path to a JSON-formatted push policy to serve\") @command.argument(\"--cert_path\", help=\"Location of",
"containing the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a",
"of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do",
"the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted",
"captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted push policy to",
"of the server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which expire in",
"default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to specify if server should inject",
"certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects",
"the server key\") @command.argument( \"--cache_time\", help=\"Do not cache objects which expire in less",
"if args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as",
"blaze.logger import logger as log from blaze.mahimahi.server import start_server from . import command",
"files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file path to a JSON-formatted push policy",
"blaze.action import Policy from blaze.logger import logger as log from blaze.mahimahi.server import start_server",
"else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file: policy_dict",
"replay(args): \"\"\" Starts a replay environment for the given replay directory, including setting",
"time (in seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to specify",
"Starts a replay environment for the given replay directory, including setting up interfaces,",
"training manifest \"\"\" import json import time import os from blaze.action import Policy",
"inject critical request extractor\", action=\"store_true\", ) @command.command def replay(args): \"\"\" Starts a replay",
"@command.command def replay(args): \"\"\" Starts a replay environment for the given replay directory,",
"= Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time, extract_critical_requests=args.extract_critical_requests, ): while True:",
"\"\"\" import json import time import os from blaze.action import Policy from blaze.logger",
"to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server",
"policy to serve\") @command.argument(\"--cert_path\", help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the",
"os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\")",
"running a DNS server, and configuring and running an nginx server to serve",
"serve the requests \"\"\" policy = None cert_path = os.path.abspath(args.cert_path) if args.cert_path else",
"json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path, policy, cache_time=args.cache_time, extract_critical_requests=args.extract_critical_requests, ):",
"setting up interfaces, running a DNS server, and configuring and running an nginx",
"seconds)\", type=int, default=None ) @command.argument( \"--extract_critical_requests\", help=\"true or false to specify if server",
"directory, including setting up interfaces, running a DNS server, and configuring and running",
"including setting up interfaces, running a DNS server, and configuring and running an",
"help=\"Location of the server certificate\") @command.argument(\"--key_path\", help=\"Location of the server key\") @command.argument( \"--cache_time\",",
"key_path = os.path.abspath(args.key_path) if args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with",
"objects which expire in less than this time (in seconds)\", type=int, default=None )",
"specify if server should inject critical request extractor\", action=\"store_true\", ) @command.command def replay(args):",
"and manipulating the training manifest \"\"\" import json import time import os from",
"import Policy from blaze.logger import logger as log from blaze.mahimahi.server import start_server from",
"@command.argument(\"replay_dir\", help=\"The directory containing the save files captured by mahimahi\") @command.argument(\"--policy\", help=\"The file",
"running an nginx server to serve the requests \"\"\" policy = None cert_path",
"as policy_file: policy_dict = json.load(policy_file) policy = Policy.from_dict(policy_dict) with start_server( args.replay_dir, cert_path, key_path,",
"DNS server, and configuring and running an nginx server to serve the requests",
"args.key_path else None if args.policy: log.debug(\"reading policy\", push_policy=args.policy) with open(args.policy, \"r\") as policy_file:"
] |
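A minimal sketch of exercising the handler directly, assuming the @command decorators wire it to an argparse-style CLI (the capture path below is hypothetical): the function body only needs an object exposing the parsed argument names.

from argparse import Namespace

args = Namespace(
    replay_dir="/tmp/capture",      # hypothetical mahimahi capture directory
    policy=None,                    # no push policy
    cert_path=None,
    key_path=None,
    cache_time=None,
    extract_critical_requests=False,
)
# replay(args)  # would block indefinitely, serving the replay environment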
[
"RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message) if __name__ ==",
"from gemstone.event.transport import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports",
"import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports = [",
"<gh_stars>1-10 import gemstone from gemstone.event.transport import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port",
"event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message)",
"gemstone from gemstone.event.transport import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000",
"8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message):",
"username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message) if __name__ == '__main__': ConsumerService().start()",
"5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message) if __name__ == '__main__':",
"port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def",
"= \"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ]",
"name = \"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\")",
"= 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self,",
"class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672,",
"= [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message) if",
"import gemstone from gemstone.event.transport import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port =",
"gemstone.event.transport import RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports =",
"RabbitMqEventTransport class ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\",",
"\"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\")",
"ConsumerService(gemstone.MicroService): name = \"consumer\" port = 8000 event_transports = [ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\",",
"[ RabbitMqEventTransport(\"192.168.1.71\", 5672, username=\"admin\", password=\"<PASSWORD>\") ] @gemstone.event_handler(\"test\") def broadcast_msg(self, message): print(message) if __name__"
] |
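A minimal variation, offered as an assumption rather than part of the source, that reads the broker credentials from the environment instead of hard-coding them; "<PASSWORD>" above is a redacted placeholder.

import os
import gemstone
from gemstone.event.transport import RabbitMqEventTransport

class EnvConsumerService(gemstone.MicroService):
    name = "consumer"
    port = 8000
    event_transports = [
        RabbitMqEventTransport(
            os.environ.get("RABBITMQ_HOST", "192.168.1.71"),
            5672,
            username=os.environ.get("RABBITMQ_USER", "admin"),
            password=os.environ["RABBITMQ_PASSWORD"],  # required; no insecure default
        )
    ]

    @gemstone.event_handler("test")
    def broadcast_msg(self, message):
        print(message)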
[
"OSError as exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath =",
"= ( \"__config\", ) def __init__( self, config: CallRecordsConfig, ) -> None: self.__config",
"async with aiofiles.open(path, mode='rb') as f: content = await f.read() return content async",
"return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception(",
"exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name)",
"elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath",
"str ) -> bytes: async with aiofiles.open(path, mode='rb') as f: content = await",
"IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__",
"if new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format =",
"file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as",
"= await f.read() return content async def convert(self, file: File, new_filetype: Filetype) ->",
"audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\"",
"file.name) async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type == Filetype.MP3:",
"content = await f.read() return content async def convert(self, file: File, new_filetype: Filetype)",
"new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\"",
"== Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\" else:",
"\"converted_\" + file.name, ) if new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype",
"\"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' )",
"f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath)",
"= os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type",
"= AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" +",
"from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from",
"Filetype) -> File: if file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try:",
"self, path: str ) -> bytes: async with aiofiles.open(path, mode='rb') as f: content",
"f: await f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type ==",
"self, config: CallRecordsConfig, ) -> None: self.__config = config async def __get_content_from_file( self,",
"path: str ) -> bytes: async with aiofiles.open(path, mode='rb') as f: content =",
"pydub import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from ..core",
"] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__( self, config: CallRecordsConfig,",
"PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__( self, config: CallRecordsConfig, ) ->",
") content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return File( name=file.name, type=new_filetype, content=content, )",
"import File from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from ...CallRecordsConfig import",
"new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" )",
"Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype",
"new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise",
"AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\")",
"format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return File( name=file.name, type=new_filetype,",
"async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type == Filetype.MP3: audio",
"raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content =",
"f.read() return content async def convert(self, file: File, new_filetype: Filetype) -> File: if",
"new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype == Filetype.MP3: new_format",
"elif new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\"",
"aiofiles.open(path, mode='rb') as f: content = await f.read() return content async def convert(self,",
"os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type ==",
"from pydub import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from",
"`{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype == Filetype.MP3:",
"Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with",
"new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise Exception(",
"== Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join(",
"amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from ...CallRecordsConfig",
"mode='rb') as f: content = await f.read() return content async def convert(self, file:",
"`{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath)",
"elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio =",
"AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name,",
"= \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k'",
"-> None: self.__config = config async def __get_content_from_file( self, path: str ) ->",
"from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__",
"AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio",
"Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise",
"def convert(self, file: File, new_filetype: Filetype) -> File: if file.type == new_filetype: return",
"AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter",
"file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath)",
"-> File: if file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory)",
"if file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError",
"with aiofiles.open(path, mode='rb') as f: content = await f.read() return content async def",
"Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype",
"\"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible type:",
"async def convert(self, file: File, new_filetype: Filetype) -> File: if file.type == new_filetype:",
"elif new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format =",
"-> bytes: async with aiofiles.open(path, mode='rb') as f: content = await f.read() return",
"import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\",",
"else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content",
"file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath)",
"= AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type:",
"`{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f: await",
"audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return File(",
"import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter):",
"Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory,",
"content async def convert(self, file: File, new_filetype: Filetype) -> File: if file.type ==",
"await f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV:",
"= \"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible",
"directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as",
"= [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__(",
"Filetype from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\",",
"+ file.name, ) if new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype ==",
"if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio =",
"aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath)",
"os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype == Filetype.MP3: new_format = \"mp3\"",
"import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype from ..core import",
"File from amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig",
") if new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format",
") -> None: self.__config = config async def __get_content_from_file( self, path: str )",
"new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return File( name=file.name,",
"= \"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE:",
"aiofiles from pydub import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import Filetype",
"import aiofiles from pydub import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain import",
"await f.read() return content async def convert(self, file: File, new_filetype: Filetype) -> File:",
"..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class",
"Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await",
"__all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def",
"config: CallRecordsConfig, ) -> None: self.__config = config async def __get_content_from_file( self, path:",
"with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if file.type == Filetype.MP3: audio =",
"type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype ==",
"== Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif",
"file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter:",
"f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio",
"class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__( self, config: CallRecordsConfig, )",
"config async def __get_content_from_file( self, path: str ) -> bytes: async with aiofiles.open(path,",
"convert(self, file: File, new_filetype: Filetype) -> File: if file.type == new_filetype: return file",
"if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter: conversion",
") -> bytes: async with aiofiles.open(path, mode='rb') as f: content = await f.read()",
"except OSError as exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath",
") audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return",
"File: if file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except",
"Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath,",
"from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ =",
"= config async def __get_content_from_file( self, path: str ) -> bytes: async with",
"new_format = \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format,",
"f: content = await f.read() return content async def convert(self, file: File, new_filetype:",
"new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype ==",
"async def __get_content_from_file( self, path: str ) -> bytes: async with aiofiles.open(path, mode='rb')",
"f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath,",
"from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ]",
"file: File, new_filetype: Filetype) -> File: if file.type == new_filetype: return file if",
"Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\" else: raise",
"= AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE:",
"== new_filetype: return file if not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc:",
"os import aiofiles from pydub import AudioSegment from amocrm_asterisk_ng.domain import File from amocrm_asterisk_ng.domain",
"error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f:",
"file.name, ) if new_filetype == Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV:",
"audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type ==",
"Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type == Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type",
"raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if",
"new_filetype: Filetype) -> File: if file.type == new_filetype: return file if not os.path.exists(self.__config.tmp_directory):",
"...CallRecordsConfig import CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = (",
"as exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory,",
"\"__config\", ) def __init__( self, config: CallRecordsConfig, ) -> None: self.__config = config",
"audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible",
"CallRecordsConfig __all__ = [ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", )",
"File, new_filetype: Filetype) -> File: if file.type == new_filetype: return file if not",
"as f: await f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif file.type",
"file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath =",
"def __get_content_from_file( self, path: str ) -> bytes: async with aiofiles.open(path, mode='rb') as",
"else: raise Exception(f\"Non-convertible type: `{file.type}`.\") new_filepath = os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, )",
"None: self.__config = config async def __get_content_from_file( self, path: str ) -> bytes:",
"import os import aiofiles from pydub import AudioSegment from amocrm_asterisk_ng.domain import File from",
"__get_content_from_file( self, path: str ) -> bytes: async with aiofiles.open(path, mode='rb') as f:",
"as f: content = await f.read() return content async def convert(self, file: File,",
"[ \"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__( self,",
"== Filetype.WAVE: new_format = \"wave\" else: raise Exception( f\"Non-convertible type: `{new_filetype}`.\" ) audio.export(",
") def __init__( self, config: CallRecordsConfig, ) -> None: self.__config = config async",
") filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content)",
"new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format = \"wave\"",
"self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype == Filetype.MP3: new_format = \"mp3\" elif",
"type: `{new_filetype}`.\" ) audio.export( new_filepath, format=new_format, bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath)",
"import Filetype from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ = [",
"( \"__config\", ) def __init__( self, config: CallRecordsConfig, ) -> None: self.__config =",
"os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter: conversion directory error:",
"not os.path.exists(self.__config.tmp_directory): try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter: conversion directory",
"self.__config = config async def __get_content_from_file( self, path: str ) -> bytes: async",
"os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" )",
"raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async",
"__init__( self, config: CallRecordsConfig, ) -> None: self.__config = config async def __get_content_from_file(",
"bytes: async with aiofiles.open(path, mode='rb') as f: content = await f.read() return content",
"return content async def convert(self, file: File, new_filetype: Filetype) -> File: if file.type",
"== Filetype.MP3: new_format = \"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\" elif",
"amocrm_asterisk_ng.domain import Filetype from ..core import IFileConverter from ...CallRecordsConfig import CallRecordsConfig __all__ =",
"__slots__ = ( \"__config\", ) def __init__( self, config: CallRecordsConfig, ) -> None:",
"CallRecordsConfig, ) -> None: self.__config = config async def __get_content_from_file( self, path: str",
"mode='wb') as f: await f.write(file.content) if file.type == Filetype.MP3: audio = AudioSegment.from_mp3(filepath) elif",
"try: os.makedirs(self.__config.tmp_directory) except OSError as exc: raise Exception( f\"FileConverter: conversion directory error: `{exc!r}`.\"",
"def __init__( self, config: CallRecordsConfig, ) -> None: self.__config = config async def",
"== Filetype.WAV: audio = AudioSegment.from_wav(filepath) elif file.type == Filetype.WAVE: audio = AudioSegment.from_WAVE(filepath) else:",
"filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb') as f: await f.write(file.content) if",
"conversion directory error: `{exc!r}`.\" ) filepath = os.path.join(self.__config.tmp_directory, file.name) async with aiofiles.open(filepath, mode='wb')",
"\"PydubFileConverter\", ] class PydubFileConverter(IFileConverter): __slots__ = ( \"__config\", ) def __init__( self, config:",
"\"mp3\" elif new_filetype == Filetype.WAV: new_format = \"wav\" elif new_filetype == Filetype.WAVE: new_format",
"= os.path.join( self.__config.tmp_directory, \"converted_\" + file.name, ) if new_filetype == Filetype.MP3: new_format =",
"bitrate='16k' ) content = await self.__get_content_from_file(new_filepath) os.remove(filepath) os.remove(new_filepath) return File( name=file.name, type=new_filetype, content=content,"
] |
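A minimal driving sketch; the CallRecordsConfig constructor arguments and the file bytes are assumptions for illustration, while the convert() signature comes from the class above.

import asyncio

async def main():
    config = CallRecordsConfig(tmp_directory="/tmp/call-records")  # assumed constructor
    converter = PydubFileConverter(config)
    mp3 = File(name="rec.mp3", type=Filetype.MP3, content=b"...")  # placeholder bytes
    wav = await converter.convert(mp3, Filetype.WAV)
    print(wav.name, wav.type)

asyncio.run(main())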
[
"requests inorder to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response,",
"immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No",
"sending the http requests and can be easily mocked out for overall testing",
"): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of",
"from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def",
"TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto",
"care of sending the http requests and can be easily mocked out for",
"\"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params)",
"# The base client creation does no error checking on uploaded data final_url",
"self._default_response_format \"Submit a get request to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params):",
"dict of attributes\" # Create a local mutable entity to check data entity_class",
"def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # The base client",
"type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\",
"uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return",
"TP client caching \"\"\" # Utils # def is_sequence(elem): \"Returns true for iterables",
"following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit",
"single string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not in url else",
") def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # The base",
"itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a",
"the basic client to auto instanciate entitiy classes from data \"\"\" def __init__(",
"yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body)",
"super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return [] # guard # THIS DOESN'T",
"proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! # Send request and return resultant",
"','.join([str(x) for x in seq]) # Response formats # class TPJsonResponseFormat(object): def parse(self,response_object):",
"response_format final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print",
"def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers =",
"'/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\"",
"self._requests = requests # for mocking def encode_params(self,params): \"\"\" Override default requests.py param",
"encode_sequence(seq): return ','.join([str(x) for x in seq]) # Response formats # class TPJsonResponseFormat(object):",
"to auto instanciate entitiy classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ):",
"\"?\" not in url else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format']",
"entity objects into edits - TP client caching \"\"\" # Utils # def",
"of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests",
"import itertools import urllib import requests import entities import collections \"\"\" Future Todo:",
"# class BasicClient(object): \"\"\" Submits reqests to TP and returns data The two",
") def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # Create a",
"__init__(self,url,requester): self.requester = requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate with",
"<entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should be",
"Pass entity objects into edits - TP client caching \"\"\" # Utils #",
"into single string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not in url",
"to TP and returns data The two main use cases for this class:",
"try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if",
"raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format())",
"encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message",
"@params entity_endpoint: can any of the following <entity type> , <entity type>/<id>, <entity",
"this functionality has been moved to the requester level \"\"\" def __init__(self,url,requester): self.requester",
"while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format",
"not response_format: response_format = self._default_response_format \"Submit a get request to tp api endpoint\"",
"Extends the basic client to auto instanciate entitiy classes from data \"\"\" def",
"one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url",
"[] # guard # THIS DOESN'T WORK AS I SLICE WILL BE TRUE",
"get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the following <entity type> , <entity",
"entity data and construction a deprecated third case used to be absolute url",
"- TP client caching \"\"\" # Utils # def is_sequence(elem): \"Returns true for",
"r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit",
"\"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend",
"for overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format",
"\"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume",
"data serialisation to suit TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else",
"elem,collections.Sequence ) and not isinstance(elem,basestring): return True else: return False def encode_sequence(seq): return",
"layer # class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py functionality takes",
"delegate with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return",
"new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto instanciate entitiy classes",
"TP and returns data The two main use cases for this class: api",
"component wrapper over request.py functionality takes care of sending the http requests and",
"r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return response_format.parse(r)",
"= '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient):",
"\"Combine params and url into single string\" final_url = \"{}{}{}\".format( url, \"?\" if",
"# class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat):",
"\"Mainly used to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) #",
"return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print final_url r",
"= \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine",
"and url into single string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not",
"the requester level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url = url def",
"Send request and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct)",
"request to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a",
"return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit a get",
"def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method",
"} return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\" Submits",
"return False def encode_sequence(seq): return ','.join([str(x) for x in seq]) # Response formats",
"= self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message",
"url endpoints for pagination but this functionality has been moved to the requester",
"self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a",
"= super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for backwards compatability TPEntityClient =",
"api endpoints required for entity data and construction a deprecated third case used",
"def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer #",
"url def authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth = auth def",
"return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given",
"seq]) # Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self):",
"creation does no error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request(",
"cases for this class: api endpoints created from user queries api endpoints required",
"endpoints created from user queries api endpoints required for entity data and construction",
"level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url = url def authenticate(self,auth): \"Replace",
"class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py functionality takes care of",
"\"Extend method to return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit )",
"self._default_response_format = response_format() self._requests = requests # for mocking def encode_params(self,params): \"\"\" Override",
"urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into single string\" final_url = \"{}{}{}\".format(",
"\"\"\" Submits reqests to TP and returns data The two main use cases",
"self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series of requests inorder to capture",
"classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self)",
"basic client to auto instanciate entitiy classes from data \"\"\" def __init__( self,url,requester,",
"iterables other than strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True",
"endpoints required for entity data and construction a deprecated third case used to",
"entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable =",
"\"Submit a get request to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\"",
"requester level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url = url def authenticate(self,auth):",
"= self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return response_format.parse(r) def",
"testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format()",
"attributes\" # The base client creation does no error checking on uploaded data",
"assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume iterable response\" yield response",
"TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object):",
"construction a deprecated third case used to be absolute url endpoints for pagination",
"is_sequence(elem): \"Returns true for iterables other than strings\" if isinstance( elem,collections.Sequence ) and",
"in params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()]",
"- Pass entity objects into edits - TP client caching \"\"\" # Utils",
"entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list",
"takes care of sending the http requests and can be easily mocked out",
"final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return",
"any of the following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert",
"WORK AS I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True)",
"itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of",
"paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated",
"Paginated Requests assume iterable response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield",
"self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params",
"The base client creation does no error checking on uploaded data final_url =",
"return [] # guard # THIS DOESN'T WORK AS I SLICE WILL BE",
"entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return [] #",
"for k,v in params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v",
"strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True else: return False",
"= super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\"",
"final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic",
"self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format,",
"requester delegate with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to",
"data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable",
"class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def",
"main use cases for this class: api endpoints created from user queries api",
"entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) #",
"\"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests = requests #",
"can any of the following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\"",
"'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\" Submits reqests to TP",
"mocking def encode_params(self,params): \"\"\" Override default requests.py param data serialisation to suit TP",
"into edits - TP client caching \"\"\" # Utils # def is_sequence(elem): \"Returns",
"DOESN'T WORK AS I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class =",
"required for entity data and construction a deprecated third case used to be",
"= '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}? def",
"params and url into single string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\"",
"single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit a get request to tp",
"k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in params.iteritems() if v } param_string",
"over request.py functionality takes care of sending the http requests and can be",
"assume iterable response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def",
"if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return",
"\"\"\" # Utils # def is_sequence(elem): \"Returns true for iterables other than strings\"",
"= \"{}{}{}\".format( url, \"?\" if \"?\" not in url else \"&\", self.encode_params(params), )",
"a local mutable entity to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable =",
"response_format() self._requests = requests # for mocking def encode_params(self,params): \"\"\" Override default requests.py",
"return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for",
"os import json import itertools import urllib import requests import entities import collections",
"def is_sequence(elem): \"Returns true for iterables other than strings\" if isinstance( elem,collections.Sequence )",
"reqests to TP and returns data The two main use cases for this",
"of sending the http requests and can be easily mocked out for overall",
"to return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not",
"= url def authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth = auth",
"collections.Sequence ), \"Error: Paginated Requests assume iterable response\" yield response while next_url: response,next_url",
"requests import entities import collections \"\"\" Future Todo: - Pass entity objects into",
"\"Create Entity given a dict of attributes\" # Create a local mutable entity",
"entity to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class",
"simple component wrapper over request.py functionality takes care of sending the http requests",
"entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity",
"self.auth = None self._default_response_format = response_format() self._requests = requests # for mocking def",
"# for mocking def encode_params(self,params): \"\"\" Override default requests.py param data serialisation to",
") if not entity_data: return [] # guard # THIS DOESN'T WORK AS",
"if not response_format: response_format = self._default_response_format \"Submit a get request to tp api",
"'/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50):",
"deprecated third case used to be absolute url endpoints for pagination but this",
"final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into single string\"",
"Entity given a dict of attributes\" # Create a local mutable entity to",
"str(v) for k,v in params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for",
"entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity",
"ID for creation! # Send request and return resultant entity dct = super(ObjectMappingClient,self).create_entity(",
"functionality has been moved to the requester level \"\"\" def __init__(self,url,requester): self.requester =",
"inorder to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence",
"to be absolute url endpoints for pagination but this functionality has been moved",
"requests.py param data serialisation to suit TP \"\"\" final_params = { k:encode_sequence(v) if",
"functionality takes care of sending the http requests and can be easily mocked",
"THIS DOESN'T WORK AS I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class",
"client caching \"\"\" # Utils # def is_sequence(elem): \"Returns true for iterables other",
"and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases",
"edits - TP client caching \"\"\" # Utils # def is_sequence(elem): \"Returns true",
"the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests =",
"a series of requests inorder to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params)",
"\"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should be non negative",
"append_params(self,url,params): \"Combine params and url into single string\" final_url = \"{}{}{}\".format( url, \"?\"",
"create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # Create a local mutable",
"False def encode_sequence(seq): return ','.join([str(x) for x in seq]) # Response formats #",
"been moved to the requester level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url",
"# SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can",
") return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print final_url",
"url else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url",
"class: api endpoints created from user queries api endpoints required for entity data",
"method to return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if",
"import os import json import itertools import urllib import requests import entities import",
"\"Returns true for iterables other than strings\" if isinstance( elem,collections.Sequence ) and not",
"post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers = {",
"x in seq]) # Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json()",
"to suit TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else str(v) for",
"TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer",
"Future Todo: - Pass entity objects into edits - TP client caching \"\"\"",
"0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" #",
"self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print",
"def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object )",
", <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return",
"\"Create Entity given a dict of attributes\" # The base client creation does",
"[\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and",
") proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation!",
"class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto instanciate entitiy classes from",
"pagination but this functionality has been moved to the requester level \"\"\" def",
"__str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return",
"), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\"",
"type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should",
"class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP",
"client to auto instanciate entitiy classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory",
"return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data:",
"\"\"\" A simple component wrapper over request.py functionality takes care of sending the",
"checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(),",
"than strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True else: return",
"non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit",
"HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py functionality takes care of sending",
"# guard # THIS DOESN'T WORK AS I SLICE WILL BE TRUE resource_type_hint",
"else str(v) for k,v in params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v)",
"def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit a get request to",
"paginated_get_request(self,url,params): \"\"\" Generator over a series of requests inorder to capture paginated resources",
"def append_params(self,url,params): \"Combine params and url into single string\" final_url = \"{}{}{}\".format( url,",
"requests and can be easily mocked out for overall testing of the library",
"and returns data The two main use cases for this class: api endpoints",
"response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object",
"next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format =",
"yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not",
"endpoints for pagination but this functionality has been moved to the requester level",
"self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity instances\"",
"response_format: response_format = self._default_response_format \"Submit a get request to tp api endpoint\" return",
"urllib import requests import entities import collections \"\"\" Future Todo: - Pass entity",
"import json import itertools import urllib import requests import entities import collections \"\"\"",
"return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series of requests inorder to",
"A simple component wrapper over request.py functionality takes care of sending the http",
"def encode_sequence(seq): return ','.join([str(x) for x in seq]) # Response formats # class",
"for mocking def encode_params(self,params): \"\"\" Override default requests.py param data serialisation to suit",
"def parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d",
"return_limit > 0,\\ \"return limit should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint])",
"# Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return",
"TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data )",
"{}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the following <entity type>",
"SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda",
"can be easily mocked out for overall testing of the library \"\"\" def",
"response_format = self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return",
"(d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over",
"new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends",
"endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series of requests inorder",
"dict of attributes\" # The base client creation does no error checking on",
"itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict",
"get request to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over",
"response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) }",
"iterable response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None):",
"response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume iterable",
"objects into edits - TP client caching \"\"\" # Utils # def is_sequence(elem):",
"response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS",
"overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format =",
"r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not",
"__init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to",
"capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error:",
"create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # The base client creation",
"None self._default_response_format = response_format() self._requests = requests # for mocking def encode_params(self,params): \"\"\"",
"response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client",
"serialisation to suit TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else str(v)",
"wrapper over request.py functionality takes care of sending the http requests and can",
"entity_endpoint,params,return_limit ) if not entity_data: return [] # guard # THIS DOESN'T WORK",
"self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth =",
"of requests inorder to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance(",
"of the following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int)",
"return ','.join([str(x) for x in seq]) # Response formats # class TPJsonResponseFormat(object): def",
"negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit )",
"request.py functionality takes care of sending the http requests and can be easily",
"return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto instanciate entitiy",
"\"\"\" Extends the basic client to auto instanciate entitiy classes from data \"\"\"",
"# class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py functionality takes care",
"\"\"\" @params entity_endpoint: can any of the following <entity type> , <entity type>/<id>,",
"of attributes\" # The base client creation does no error checking on uploaded",
"entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for backwards compatability",
"except: print \"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format:",
"the following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and",
"client creation does no error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity=",
"mutable entity to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, )",
"'/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity",
"use cases for this class: api endpoints created from user queries api endpoints",
"self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of",
"k,v in params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in",
"Todo: - Pass entity objects into edits - TP client caching \"\"\" #",
"collections \"\"\" Future Todo: - Pass entity objects into edits - TP client",
"\"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in params.iteritems()",
"request and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) #",
"\"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) #",
"\"{}{}{}\".format( url, \"?\" if \"?\" not in url else \"&\", self.encode_params(params), ) return",
"class BasicClient(object): \"\"\" Submits reqests to TP and returns data The two main",
"for x in seq]) # Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return",
"= self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print",
"print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise",
") # Clients # class BasicClient(object): \"\"\" Submits reqests to TP and returns",
"of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return []",
"True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content",
"formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class",
"series of requests inorder to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert",
"BasicClient(object): \"\"\" Submits reqests to TP and returns data The two main use",
"> 0,\\ \"return limit should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return",
"Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def __str__(self): return \"json\"",
"= requests # for mocking def encode_params(self,params): \"\"\" Override default requests.py param data",
"\"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object):",
"library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests = requests",
"final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url",
"# Clients # class BasicClient(object): \"\"\" Submits reqests to TP and returns data",
"self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return",
"entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return [] # guard #",
"mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! # Send request",
"\"\"\" Generator over a series of requests inorder to capture paginated resources \"\"\"",
"easily mocked out for overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth",
"to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get(",
"\"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\"",
"response, collections.Sequence ), \"Error: Paginated Requests assume iterable response\" yield response while next_url:",
"# HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py",
"response_format = self._default_response_format \"Submit a get request to tp api endpoint\" return self.make_request('get',url,params,response_format)",
"but this functionality has been moved to the requester level \"\"\" def __init__(self,url,requester):",
"msg_content.pop('Id',None) # No ID for creation! # Send request and return resultant entity",
"super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity",
"self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the",
"super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for backwards compatability TPEntityClient = ObjectMappingClient",
"to the requester level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url = url",
"self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content",
"in url else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format",
"instanciate entitiy classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory",
"no error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format",
"isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume iterable response\" yield response while",
"response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit a get request",
"self.requester = requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate with authenicated",
"moved to the requester level \"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url =",
"not isinstance(elem,basestring): return True else: return False def encode_sequence(seq): return ','.join([str(x) for x",
"print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format",
"response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format",
"assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should be non negative integer\"",
"return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\"",
") return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into single string\" final_url",
"over a series of requests inorder to capture paginated resources \"\"\" response,next_url =",
"{ k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in params.iteritems() if v }",
"params['format'] = response_format final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status()",
"error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format =",
"auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return",
"= response_format() self._requests = requests # for mocking def encode_params(self,params): \"\"\" Override default",
"import collections \"\"\" Future Todo: - Pass entity objects into edits - TP",
"case used to be absolute url endpoints for pagination but this functionality has",
"WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of",
"authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\"",
"# Utils # def is_sequence(elem): \"Returns true for iterables other than strings\" if",
"given a dict of attributes\" # Create a local mutable entity to check",
"=self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None)",
"else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url =",
"parse(self,response_object): return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d =",
"for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url",
"if not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format),",
"to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE",
"Override default requests.py param data serialisation to suit TP \"\"\" final_params = {",
"given a dict of attributes\" # The base client creation does no error",
"user queries api endpoints required for entity data and construction a deprecated third",
"= response_format final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except:",
"= { k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in params.iteritems() if v",
"final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint:",
"= auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url = '/'.join([self.tp_api_url,url])",
"on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), )",
"msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! # Send request and",
"lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\"",
"requests # for mocking def encode_params(self,params): \"\"\" Override default requests.py param data serialisation",
"final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not in url else \"&\", self.encode_params(params),",
"v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,')",
"= '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create",
"other than strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True else:",
"return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into single string\" final_url =",
"be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0,",
"limit should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params)",
"a get request to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator",
"response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if",
"return response_object.json() def __str__(self): return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse(",
"instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return [] # guard",
"response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers",
"of attributes\" # Create a local mutable entity to check data entity_class =",
") mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content =",
"two main use cases for this class: api endpoints created from user queries",
"with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw",
"= proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! # Send request and return",
"param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params):",
"not entity_data: return [] # guard # THIS DOESN'T WORK AS I SLICE",
"be absolute url endpoints for pagination but this functionality has been moved to",
"queries api endpoints required for entity data and construction a deprecated third case",
"requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth",
"d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher():",
"for pagination but this functionality has been moved to the requester level \"\"\"",
"entity_endpoint: can any of the following <entity type> , <entity type>/<id>, <entity type>/<id>/<collection>",
"def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities(",
"= entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}):",
"\"Error: Paginated Requests assume iterable response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={})",
"= self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request(",
"= mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! # Send",
"string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not in url else \"&\",",
"\"?\" if \"?\" not in url else \"&\", self.encode_params(params), ) return final_url def",
"is_sequence(v) else str(v) for k,v in params.iteritems() if v } param_string = \"&\".join(",
"mocked out for overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth =",
"= entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity instances\" entity_data",
"= self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given",
"def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request(",
"make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs) try:",
"), \"Error: Paginated Requests assume iterable response\" yield response while next_url: response,next_url =",
"a deprecated third case used to be absolute url endpoints for pagination but",
"response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format:",
"type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should be non",
"def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the following <entity type> ,",
"HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple component wrapper over request.py functionality",
"return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\" Submits reqests",
"immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity =",
"itertools import urllib import requests import entities import collections \"\"\" Future Todo: -",
"json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) #",
"return_limit ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # The",
"def __init__(self,url,requester): self.requester = requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate",
"= requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester delegate with authenicated one\"",
"list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return",
"absolute url endpoints for pagination but this functionality has been moved to the",
"to tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series",
"= json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message )",
"resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for backwards",
"in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into single",
"to capture paginated resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ),",
"= { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients #",
"AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the following <entity",
"final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def create_entity(self,entity_endpoint,data,params={}):",
"def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # Create a local",
"self._requests.request(method,final_url,auth=self.auth,**kwargs) try: r.raise_for_status() except: print \"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None):",
"self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\" Submits reqests to",
"true for iterables other than strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring):",
"# Create a local mutable entity to check data entity_class = self.entity_class_factory.get( entity_endpoint,",
"} param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def",
"self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used to return raw response\" final_url =",
"returns data The two main use cases for this class: api endpoints created",
"api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series of requests",
"used to be absolute url endpoints for pagination but this functionality has been",
"= self._default_response_format \"Submit a get request to tp api endpoint\" return self.make_request('get',url,params,response_format) def",
"import urllib import requests import entities import collections \"\"\" Future Todo: - Pass",
") return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto instanciate",
"<entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit > 0,\\ \"return limit",
"entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create",
"SHOULD WE LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any",
"from user queries api endpoints required for entity data and construction a deprecated",
"data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity",
"{ \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients # class",
"headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message) } return self.make_request( 'post',url,params,response_format=response_format, headers=headers,data=encoded_message ) # Clients",
"local mutable entity to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True,",
"isinstance(return_limit,int) and return_limit > 0,\\ \"return limit should be non negative integer\" final_url",
"params.iteritems() if v } param_string = \"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] )",
"= TPJsonResponseFormat(), ) return new_entity class ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to",
"entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" # Create",
"headers=headers,data=encoded_message ) # Clients # class BasicClient(object): \"\"\" Submits reqests to TP and",
"\"\"\" def __init__(self,url,requester): self.requester = requester self.tp_api_url = url def authenticate(self,auth): \"Replace requester",
"http requests and can be easily mocked out for overall testing of the",
"0,\\ \"return limit should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice(",
"raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS",
") return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple component",
"for creation! # Send request and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params",
"def __init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests = requests # for",
"raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format = self._default_response_format \"Submit a",
"TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in",
"PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the following",
"\"\"\" Override default requests.py param data serialisation to suit TP \"\"\" final_params =",
"for this class: api endpoints created from user queries api endpoints required for",
"No ID for creation! # Send request and return resultant entity dct =",
"__init__(self,response_format=TPEntityResponseFormat): self.auth = None self._default_response_format = response_format() self._requests = requests # for mocking",
"\"\"\" Future Todo: - Pass entity objects into edits - TP client caching",
"if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True else: return False def",
"data and construction a deprecated third case used to be absolute url endpoints",
"proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for creation! #",
"data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory = entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50):",
"import entities import collections \"\"\" Future Todo: - Pass entity objects into edits",
"return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple component wrapper",
"should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ),",
"BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data",
"url, \"?\" if \"?\" not in url else \"&\", self.encode_params(params), ) return final_url",
"The two main use cases for this class: api endpoints created from user",
"return \"json\" class TPEntityResponseFormat(TPJsonResponseFormat): def parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next'))",
"# def is_sequence(elem): \"Returns true for iterables other than strings\" if isinstance( elem,collections.Sequence",
"k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params and url into",
"def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print final_url r = self._requests.request(method,final_url,auth=self.auth,**kwargs)",
"guard # THIS DOESN'T WORK AS I SLICE WILL BE TRUE resource_type_hint =",
"# No ID for creation! # Send request and return resultant entity dct",
"self.single_get_request(next_url,params={}) yield response def post_request(self,url,params,message_body,response_format=None): if not response_format: response_format = self._default_response_format encoded_message =",
"if not entity_data: return [] # guard # THIS DOESN'T WORK AS I",
"url into single string\" final_url = \"{}{}{}\".format( url, \"?\" if \"?\" not in",
"isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return True else: return False def encode_sequence(seq):",
"super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\" A",
"mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict()",
"final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] = response_format final_url = self.append_params(url,params) print final_url r =",
"\"&\".join( [\"{}={}\".format(k,v) for k,v in final_params.iteritems()] ) return urllib.quote(param_string,safe='=&,') def append_params(self,url,params): \"Combine params",
"\"return limit should be non negative integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable(",
"creation! # Send request and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params )",
"def encode_params(self,params): \"\"\" Override default requests.py param data serialisation to suit TP \"\"\"",
"and return_limit > 0,\\ \"return limit should be non negative integer\" final_url =",
"json import itertools import urllib import requests import entities import collections \"\"\" Future",
"return itertools.imap( lambda n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict",
"this class: api endpoints created from user queries api endpoints required for entity",
"import requests import entities import collections \"\"\" Future Todo: - Pass entity objects",
"print \"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format",
"created from user queries api endpoints required for entity data and construction a",
"if \"?\" not in url else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs):",
"<entity type> , <entity type>/<id>, <entity type>/<id>/<collection> \"\"\" assert isinstance(return_limit,int) and return_limit >",
"False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID for",
"= self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False,",
"n:entity_class(n), entity_data ) def create_entity(self,entity_endpoint,data,params={}): \"Create Entity given a dict of attributes\" #",
"api endpoints created from user queries api endpoints required for entity data and",
"\"Replace requester delegate with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly used",
"entities import collections \"\"\" Future Todo: - Pass entity objects into edits -",
"Submits reqests to TP and returns data The two main use cases for",
"out for overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat): self.auth = None",
"be easily mocked out for overall testing of the library \"\"\" def __init__(self,response_format=TPEntityResponseFormat):",
"final_params = { k:encode_sequence(v) if is_sequence(v) else str(v) for k,v in params.iteritems() if",
"self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume iterable response\" yield",
"not in url else \"&\", self.encode_params(params), ) return final_url def make_request(self,method,url,params,response_format,**kwargs): params['format'] =",
"if is_sequence(v) else str(v) for k,v in params.iteritems() if v } param_string =",
"authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat): \"Mainly",
"WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n),",
"caching \"\"\" # Utils # def is_sequence(elem): \"Returns true for iterables other than",
"third case used to be absolute url endpoints for pagination but this functionality",
"auto instanciate entitiy classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester)",
"I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap(",
"Requests assume iterable response\" yield response while next_url: response,next_url = self.single_get_request(next_url,params={}) yield response",
"a dict of attributes\" # The base client creation does no error checking",
"entitiy classes from data \"\"\" def __init__( self,url,requester, entity_class_factory=entities.EntityClassFactory ): super(ObjectMappingClient,self).__init__(url,requester) self.entity_class_factory =",
"entity_class_factory(self) def get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity instances\" entity_data =",
"LEAVE PARAMS AS {}? def get_entities(self,entity_endpoint,params={},return_limit=50): \"\"\" @params entity_endpoint: can any of the",
"param data serialisation to suit TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v)",
"Entity given a dict of attributes\" # The base client creation does no",
"else: return False def encode_sequence(seq): return ','.join([str(x) for x in seq]) # Response",
"= self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests assume iterable response\"",
"self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, )",
"response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class HTTPRequestDispatcher(): \"\"\" A simple",
"return True else: return False def encode_sequence(seq): return ','.join([str(x) for x in seq])",
"parse(self,response_object): d = super(TPEntityResponseFormat,self).parse( response_object ) return (d.get('Items',(d,)),d.get('Next')) # HTTP layer # class",
"suit TP \"\"\" final_params = { k:encode_sequence(v) if is_sequence(v) else str(v) for k,v",
"Create a local mutable entity to check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable",
"a dict of attributes\" # Create a local mutable entity to check data",
"= None self._default_response_format = response_format() self._requests = requests # for mocking def encode_params(self,params):",
"def paginated_get_request(self,url,params): \"\"\" Generator over a series of requests inorder to capture paginated",
"def authenticate(self,auth): \"Replace requester delegate with authenicated one\" self.requester.auth = auth def raw_request(self,url,params={},response_format=TPJsonResponseFormat):",
"= False, ) proposed_entity = mutable_entity_class.create_from_data(data) msg_content = proposed_entity.toDict() msg_content.pop('Id',None) # No ID",
") and not isinstance(elem,basestring): return True else: return False def encode_sequence(seq): return ','.join([str(x)",
"Generator over a series of requests inorder to capture paginated resources \"\"\" response,next_url",
"check data entity_class = self.entity_class_factory.get( entity_endpoint, immutable = True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint,",
"isinstance(elem,basestring): return True else: return False def encode_sequence(seq): return ','.join([str(x) for x in",
"# THIS DOESN'T WORK AS I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0]",
"has been moved to the requester level \"\"\" def __init__(self,url,requester): self.requester = requester",
"dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return entity_class(dct) # Aliases for backwards compatability TPEntityClient",
"get_entities(self,entity_endpoint,params={},return_limit=50): \"Extend method to return list of entity instances\" entity_data = super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit",
"True else: return False def encode_sequence(seq): return ','.join([str(x) for x in seq]) #",
"entity_data: return [] # guard # THIS DOESN'T WORK AS I SLICE WILL",
"the http requests and can be easily mocked out for overall testing of",
"return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE",
"default requests.py param data serialisation to suit TP \"\"\" final_params = { k:encode_sequence(v)",
"Clients # class BasicClient(object): \"\"\" Submits reqests to TP and returns data The",
"final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD WE LEAVE PARAMS AS {}?",
"= True, ) mutable_entity_class =self.entity_class_factory.get( entity_endpoint, immutable = False, ) proposed_entity = mutable_entity_class.create_from_data(data)",
"not response_format: response_format = self._default_response_format encoded_message = json.dumps(message_body) headers = { \"content-type\":\"application/\"+str(response_format), \"content-length\":len(encoded_message)",
"and not isinstance(elem,basestring): return True else: return False def encode_sequence(seq): return ','.join([str(x) for",
"\"ERROR\",final_url print r.content raise return response_format.parse(r) def single_get_request(self,url,params,response_format=None): if not response_format: response_format =",
"used to return raw response\" final_url = '/'.join([self.tp_api_url,url]) return self.requester.single_get_request( final_url,params,response_format()) # SHOULD",
"= super(TPEntityClient,self).get_entities( entity_endpoint,params,return_limit ) if not entity_data: return [] # guard # THIS",
"attributes\" # Create a local mutable entity to check data entity_class = self.entity_class_factory.get(",
"final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data, response_format = TPJsonResponseFormat(), ) return new_entity class",
"<reponame>ash30/tpapi<filename>tpapi/client.py import os import json import itertools import urllib import requests import entities",
"does no error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint]) new_entity= self.requester.post_request( final_url,params,data,",
"data The two main use cases for this class: api endpoints created from",
"and construction a deprecated third case used to be absolute url endpoints for",
"base client creation does no error checking on uploaded data final_url = '/'.join([self.tp_api_url,entity_endpoint])",
"AS I SLICE WILL BE TRUE resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return",
"and can be easily mocked out for overall testing of the library \"\"\"",
"tp api endpoint\" return self.make_request('get',url,params,response_format) def paginated_get_request(self,url,params): \"\"\" Generator over a series of",
"resource_type_hint = entity_endpoint.split('/')[0] entity_class = self.entity_class_factory.get(resource_type_hint,immutable=True) return itertools.imap( lambda n:entity_class(n), entity_data ) def",
"for iterables other than strings\" if isinstance( elem,collections.Sequence ) and not isinstance(elem,basestring): return",
"Utils # def is_sequence(elem): \"Returns true for iterables other than strings\" if isinstance(",
"encode_params(self,params): \"\"\" Override default requests.py param data serialisation to suit TP \"\"\" final_params",
"# Send request and return resultant entity dct = super(ObjectMappingClient,self).create_entity( entity_endpoint,msg_content,params ) return",
"for entity data and construction a deprecated third case used to be absolute",
"in seq]) # Response formats # class TPJsonResponseFormat(object): def parse(self,response_object): return response_object.json() def",
"integer\" final_url = '/'.join([self.tp_api_url,entity_endpoint]) return itertools.islice( itertools.chain.from_iterable( self.requester.paginated_get_request(final_url,params) ), 0, return_limit ) def",
"ObjectMappingClient(BasicClient): \"\"\" Extends the basic client to auto instanciate entitiy classes from data",
"resources \"\"\" response,next_url = self.single_get_request(url,params) assert isinstance( response, collections.Sequence ), \"Error: Paginated Requests"
] |
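For orientation, a minimal usage sketch of the client above, assuming a TargetProcess-style REST endpoint. The base URL, credentials, endpoint name, and query parameters are placeholders, not values from the source.

# Hypothetical usage (Python 2, matching the module above); URL, credentials and query are illustrative only.
dispatcher = HTTPRequestDispatcher()
client = BasicClient('https://example.tpondemand.com/api/v1', dispatcher)
client.authenticate(('user', 'secret'))  # stored on the dispatcher and handed straight to requests
for bug in client.get_entities('Bugs', params={'take': 10}, return_limit=10):
    print bug

BasicClient yields the parsed response items as plain dicts; ObjectMappingClient would instead wrap each item in an entity class produced by entities.EntityClassFactory.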
# Beginner exercise: read integers until the user chooses to stop, then report the average.
parar = False
soma = 0
cont = 0
while parar == False:
    num = int(input('''\033[1;36mDigite um numero inteiro:\033[m '''))  # "Enter an integer"
    soma += num
    cont += 1
    print('''Deseja continuar?
[ 1 ] SIM
[ 2 ] NÃO''')                                                           # "Continue? 1 = yes, 2 = no"
    opcao = int(input('Escolha sua opção: '))                           # "Choose your option"
    if opcao == 2:
        parar = True
    elif opcao == 1:
        parar = False
    else:
        print('OPÇÃO INVALIDA.')                                        # "Invalid option"
media = soma/cont
print(f'A média é {media}')                                             # "The average is ..."
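To make the control flow concrete: entering 4, choosing 1, entering 6, and then choosing 2 leaves soma = 10 and cont = 2, so the script prints an average (media) of 5.0. Note that an invalid menu option only prints the warning; parar stays False, so the loop simply asks for another number.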
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# xavier: model training pipeline for MLP / RNN / CNN classifiers (Python 2 source).
import os
import time
import numpy as np
import torch
import torch.optim as optim
from sklearn.model_selection import train_test_split
from torch import nn

import xavier.constants.config as config
from xavier.constants.type import Type
from xavier.core.dataLoader import DataLoader
from xavier.core.dataset import Dataset
from xavier.core.training import Training
from xavier.core.transformation import get_standard, init_standard
from xavier.net.cnn import Cnn
from xavier.net.mlp import Mlp
from xavier.net.rnn_lstm_2 import Rnn

torch.manual_seed(1234)


class Model(object):

    def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch,
                 batch_size, input_layer=0, hidden_layer=0, output_layer=3,
                 matriz_size=0, type=Type.mlp):
        self.input_layer = input_layer
        self.hidden_layer = hidden_layer
        self.output_layer = output_layer
        self.matriz_size = matriz_size
        self.device = device
        self.filenames = filenames
        self.type = type
        self.batch_size = batch_size
        self.learning_rate = learning_rate
        self.filenames_models = filenames_models
        self.num_epoch = num_epoch
        self.file_accucary = np.zeros(len(self.filenames))
        self.version = config.VERSION

    def __train_model(self, train_loader, valid_loader, test_loader, train_size, valid_size, show, filename):
        self.model.first_time = time.time()
        for epoch in range(1, self.num_epoch+1):
            print "Epoch " + str(epoch) + "/" + str(self.num_epoch)
            self.model.train(epoch, train_loader, train_size)
            self.model.validation(valid_loader, valid_size)
        print('Data train: ')
        self.model.validation(train_loader, train_size, train=True)
        print('Data Test: ')
        return self.model.test(test_loader, show, filename)

    def __build_model(self):
        # build Model
        if self.type == Type.mlp:
            model = Mlp(self.device).to(self.device)
        elif self.type == Type.rnn:
            model = Rnn(self.device).to(self.device)
            print(model)
        elif self.type == Type.cnn:
            model = Cnn(self.device).to(self.device)
        # choose optimizer and loss function
        criterion = nn.CrossEntropyLoss()
        optimizer = optim.Adam(model.parameters(), lr=self.learning_rate)
        model = model.to(self.device)
        model_training = Training(model, criterion, optimizer, self.device)
        return model_training

    def get_dataset(self, file):
        dataset = np.loadtxt(file + 'dataset.csv', delimiter=',', dtype=np.float64)
        dataset_x = np.asarray([l[1:] for l in dataset])
        dataset_y = np.asarray([l[0] for l in dataset])
        return dataset_x, dataset_y

    def get_normalization(self, X_train, X_test):
        if self.type == Type.mlp:
            X_train, standard = init_standard(X_train)
            X_test = get_standard(X_test, standard)
            return X_train, X_test, standard
        elif self.type == Type.rnn:
            X_train, standard = init_standard(X_train)
            X_test = get_standard(X_test, standard)
            return X_train, X_test, standard
        elif self.type == Type.cnn:
            X_train, standard = init_standard(X_train)
            X_test = get_standard(X_test, standard)
            return X_train, X_test, standard

    def get_loader(self, type, batch_size, X_train, y_train, X_test, y_test, matriz_size):
        if type == Type.mlp:
            train_data = Dataset(X_train, y_train, Type.mlp)
            test_data = Dataset(X_test, y_test, Type.mlp)
        elif type == Type.rnn:
            train_data = Dataset(X_train, y_train, Type.rnn, matriz_size)
            test_data = Dataset(X_test, y_test, Type.rnn, matriz_size)
        elif type == Type.cnn:
            train_data = Dataset(X_train, y_train, Type.cnn, matriz_size)
            test_data = Dataset(X_test, y_test, Type.cnn, matriz_size)
        dataLoader = DataLoader()
        train_loader, valid_loader, train_size, valid_size = dataLoader.get_train(train_data, batch_size)
        test_loader = dataLoader.get_test(test_data, batch_size)
        return train_loader, valid_loader, test_loader, train_size, valid_size

    def save_model(self, type, path_model, model, acc, standard):
        # Save the Trained Model
        path = path_model + type.value
        if not os.path.exists(path):
            os.mkdir(path)
        filename = path + '/' + self.version + ' {:.2f}'.format(acc) + '.pkl'
        torch.save({
            'model': model.model.state_dict(),
            'standard': standard
        }, filename)

    def create_model(self, times, show):
        self.file_accucary = np.zeros(len(self.filenames))
        for _ in range(times):
            for idx, file in enumerate(self.filenames):
                print "\nTraining dataset: " + str(file) + "\n"
                dataset_x, dataset_y = self.get_dataset(file)
                X_train, X_test, y_train, y_test = train_test_split(
                    dataset_x, dataset_y, test_size=0.2, random_state=21)
                X_train, X_test, standard = self.get_normalization(X_train, X_test)
                train_loader, valid_loader, test_loader, train_size, valid_size = self.get_loader(
                    self.type, self.batch_size, X_train, y_train, X_test, y_test, self.matriz_size)
                self.input_layer = X_train.shape[1]
                self.model = self.__build_model()
                path = self.filenames_models[idx] + \
                    self.type.value + '/' + self.version
                accuracy = self.__train_model(train_loader, valid_loader, test_loader,
                                              train_size, valid_size, show, filename=path)
                if accuracy > self.file_accucary[idx]:
                    self.save_model(self.type, self.filenames_models[idx],
                                    self.model, accuracy, standard)
                    self.file_accucary[idx] = accuracy  # assumption: the source fragment ends at this assignment
"self.matriz_size = matriz_size self.device = device self.filenames = filenames self.matriz_size = matriz_size self.type",
"valid_size, show, filename): self.model.first_time = time.time() for epoch in range(1, self.num_epoch+1): print \"Epoch",
"= path + '/'+self.version+' {:.2f}'.format(acc)+'.pkl' torch.save({ 'model': model.model.state_dict(), 'standard': standard }, filename) def",
"show, filename) def __build_model(self): # build Model if self.type == Type.mlp: model =",
"X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard elif",
"import torch.optim as optim from sklearn.model_selection import train_test_split from torch import nn import",
"== Type.rnn: model = Rnn(self.device).to(self.device) print(model) elif self.type == Type.cnn: model = Cnn(self.device).to(self.device)",
"hidden_layer self.output_layer = output_layer self.matriz_size = matriz_size self.device = device self.filenames = filenames",
"model.model.state_dict(), 'standard': standard }, filename) def create_model(self, times, show): self.file_accucary = np.zeros(len(self.filenames)) for",
"as config from xavier.constants.type import Type from xavier.core.dataLoader import DataLoader from xavier.core.dataset import",
"= init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard def get_loader(self, type,",
"self.input_layer = X_train.shape[1] self.model = self.__build_model() path = self.filenames_models[idx] + \\ self.type.value +",
"criterion, optimizer, self.device) return model_training def get_dataset(self, file): dataset = np.loadtxt(file + 'dataset.csv',",
"= DataLoader() train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data, batch_size) test_loader = dataLoader.get_test(test_data,",
"valid_loader, test_loader, train_size, valid_size def save_model(self, type, path_model, model, acc, standard): # Save",
"\"Epoch \" + str(epoch) + \"/\" + str(self.num_epoch) self.model.train(epoch, train_loader, train_size) self.model.validation(valid_loader, valid_size)",
"standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard def get_loader(self,",
"test_data = Dataset(X_test, y_test, Type.cnn, matriz_size) dataLoader = DataLoader() train_loader, valid_loader, train_size, valid_size",
"test_loader, train_size, valid_size, show, filename=path) if accuracy > self.file_accucary[idx]: self.save_model(self.type, self.filenames_models[idx], self.model, accuracy,",
"X_train, X_test, standard elif self.type == Type.rnn: X_train, standard = init_standard(X_train) X_test =",
"get_standard(X_test, standard) return X_train, X_test, standard elif self.type == Type.rnn: X_train, standard =",
"type=Type.mlp): self.input_layer = input_layer self.hidden_layer = hidden_layer self.output_layer = output_layer self.matriz_size = matriz_size",
"\"/\" + str(self.num_epoch) self.model.train(epoch, train_loader, train_size) self.model.validation(valid_loader, valid_size) print('Data train: ') self.model.validation(train_loader, train_size,",
"import Cnn from xavier.net.mlp import Mlp from xavier.net.rnn_lstm_2 import Rnn torch.manual_seed(1234) class Model(object):",
"elif self.type == Type.rnn: model = Rnn(self.device).to(self.device) print(model) elif self.type == Type.cnn: model",
"function criterion = nn.CrossEntropyLoss() optimizer = optim.Adam(model.parameters(), lr=self.learning_rate) model = model.to(self.device) model_training =",
"dataLoader = DataLoader() train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data, batch_size) test_loader =",
"= np.zeros(len(self.filenames)) for _ in range(times): for idx, file in enumerate(self.filenames): print \"\\nTraining",
"type, batch_size, X_train, y_train, X_test, y_test, matriz_size): if type == Type.mlp: train_data =",
"standard elif self.type == Type.rnn: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard)",
"Rnn torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch, batch_size, input_layer=0,",
"save_model(self, type, path_model, model, acc, standard): # Save the Trained Model path =",
"self.matriz_size = matriz_size self.type = type self.batch_size = batch_size self.learning_rate = learning_rate self.filenames_models",
"return dataset_x, dataset_y def get_normalization(self, X_train, X_test): if self.type == Type.mlp: X_train, standard",
"y_train, X_test, y_test, self.matriz_size) self.input_layer = X_train.shape[1] self.model = self.__build_model() path = self.filenames_models[idx]",
"elif type == Type.rnn: train_data = Dataset(X_train, y_train, Type.rnn, matriz_size) test_data = Dataset(X_test,",
"y_train, y_test = train_test_split( dataset_x, dataset_y, test_size=0.2, random_state=21) X_train, X_test, standard = self.get_normalization(",
"elif self.type == Type.cnn: model = Cnn(self.device).to(self.device) # choose optimizer and loss function",
"= config.VERSION def __train_model(self, train_loader, valid_loader, test_loader, train_size, valid_size, show, filename): self.model.first_time =",
"elif type == Type.cnn: train_data = Dataset(X_train, y_train, Type.cnn, matriz_size) test_data = Dataset(X_test,",
"= self.get_dataset(file) X_train, X_test, y_train, y_test = train_test_split( dataset_x, dataset_y, test_size=0.2, random_state=21) X_train,",
"coding: utf-8 -*- import os import time import numpy as np import torch",
"test_loader = dataLoader.get_test(test_data, batch_size) return train_loader, valid_loader, test_loader, train_size, valid_size def save_model(self, type,",
"torch.optim as optim from sklearn.model_selection import train_test_split from torch import nn import xavier.constants.config",
"X_train, X_test, y_train, y_test = train_test_split( dataset_x, dataset_y, test_size=0.2, random_state=21) X_train, X_test, standard",
"as optim from sklearn.model_selection import train_test_split from torch import nn import xavier.constants.config as",
"config from xavier.constants.type import Type from xavier.core.dataLoader import DataLoader from xavier.core.dataset import Dataset",
"}, filename) def create_model(self, times, show): self.file_accucary = np.zeros(len(self.filenames)) for _ in range(times):",
"self.type == Type.mlp: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train,",
"X_test = get_standard(X_test, standard) return X_train, X_test, standard elif self.type == Type.cnn: X_train,",
"= self.__train_model(train_loader, valid_loader, test_loader, train_size, valid_size, show, filename=path) if accuracy > self.file_accucary[idx]: self.save_model(self.type,",
"from xavier.core.dataLoader import DataLoader from xavier.core.dataset import Dataset from xavier.core.training import Training from",
"X_test = get_standard(X_test, standard) return X_train, X_test, standard elif self.type == Type.rnn: X_train,",
"str(self.num_epoch) self.model.train(epoch, train_loader, train_size) self.model.validation(valid_loader, valid_size) print('Data train: ') self.model.validation(train_loader, train_size, train=True) print('Data",
"valid_loader, test_loader, train_size, valid_size = self.get_loader( self.type, self.batch_size, X_train, y_train, X_test, y_test, self.matriz_size)",
"dataset = np.loadtxt(file + 'dataset.csv', delimiter=',', dtype=np.float64) dataset_x = np.asarray([l[1:] for l in",
"= path_model+type.value if not os.path.exists(path): os.mkdir(path) filename = path + '/'+self.version+' {:.2f}'.format(acc)+'.pkl' torch.save({",
"Type.mlp: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard",
"file in enumerate(self.filenames): print \"\\nTraining dataset: \" + str(file) + \"\\n\" dataset_x, dataset_y",
"self.batch_size, X_train, y_train, X_test, y_test, self.matriz_size) self.input_layer = X_train.shape[1] self.model = self.__build_model() path",
"print(model) elif self.type == Type.cnn: model = Cnn(self.device).to(self.device) # choose optimizer and loss",
"filenames self.matriz_size = matriz_size self.type = type self.batch_size = batch_size self.learning_rate = learning_rate",
"X_train, X_test, standard def get_loader(self, type, batch_size, X_train, y_train, X_test, y_test, matriz_size): if",
"xavier.core.transformation import get_standard, init_standard from xavier.net.cnn import Cnn from xavier.net.mlp import Mlp from",
"dataset_y = np.asarray([l[0] for l in dataset]) return dataset_x, dataset_y def get_normalization(self, X_train,",
"== Type.cnn: train_data = Dataset(X_train, y_train, Type.cnn, matriz_size) test_data = Dataset(X_test, y_test, Type.cnn,",
"self.type, self.batch_size, X_train, y_train, X_test, y_test, self.matriz_size) self.input_layer = X_train.shape[1] self.model = self.__build_model()",
"train_loader, valid_loader, test_loader, train_size, valid_size, show, filename): self.model.first_time = time.time() for epoch in",
"filenames_models, device, learning_rate, num_epoch, batch_size, input_layer=0, hidden_layer=0, output_layer=3, matriz_size=0, type=Type.mlp): self.input_layer = input_layer",
"\"\\nTraining dataset: \" + str(file) + \"\\n\" dataset_x, dataset_y = self.get_dataset(file) X_train, X_test,",
"X_test): if self.type == Type.mlp: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard)",
"xavier.net.rnn_lstm_2 import Rnn torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch,",
"Type.rnn: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard",
"def __build_model(self): # build Model if self.type == Type.mlp: model = Mlp(self.device).to(self.device) elif",
"print('Data Test: ') return self.model.test(test_loader, show, filename) def __build_model(self): # build Model if",
"standard): # Save the Trained Model path = path_model+type.value if not os.path.exists(path): os.mkdir(path)",
"import Rnn torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch, batch_size,",
"+ str(file) + \"\\n\" dataset_x, dataset_y = self.get_dataset(file) X_train, X_test, y_train, y_test =",
"import nn import xavier.constants.config as config from xavier.constants.type import Type from xavier.core.dataLoader import",
"np.zeros(len(self.filenames)) for _ in range(times): for idx, file in enumerate(self.filenames): print \"\\nTraining dataset:",
"X_test, standard elif self.type == Type.rnn: X_train, standard = init_standard(X_train) X_test = get_standard(X_test,",
"train_size, valid_size = self.get_loader( self.type, self.batch_size, X_train, y_train, X_test, y_test, self.matriz_size) self.input_layer =",
"= np.asarray([l[0] for l in dataset]) return dataset_x, dataset_y def get_normalization(self, X_train, X_test):",
"acc, standard): # Save the Trained Model path = path_model+type.value if not os.path.exists(path):",
"filename=path) if accuracy > self.file_accucary[idx]: self.save_model(self.type, self.filenames_models[idx], self.model, accuracy, standard) self.file_accucary[idx] = accuracy",
"matriz_size): if type == Type.mlp: train_data = Dataset(X_train, y_train, Type.mlp) test_data = Dataset(X_test,",
"y_test, matriz_size): if type == Type.mlp: train_data = Dataset(X_train, y_train, Type.mlp) test_data =",
"= hidden_layer self.output_layer = output_layer self.matriz_size = matriz_size self.device = device self.filenames =",
"import Mlp from xavier.net.rnn_lstm_2 import Rnn torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models,",
"nn import xavier.constants.config as config from xavier.constants.type import Type from xavier.core.dataLoader import DataLoader",
"test_size=0.2, random_state=21) X_train, X_test, standard = self.get_normalization( X_train, X_test) train_loader, valid_loader, test_loader, train_size,",
"== Type.rnn: X_train, standard = init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test,",
"path + '/'+self.version+' {:.2f}'.format(acc)+'.pkl' torch.save({ 'model': model.model.state_dict(), 'standard': standard }, filename) def create_model(self,",
"X_train, X_test) train_loader, valid_loader, test_loader, train_size, valid_size = self.get_loader( self.type, self.batch_size, X_train, y_train,",
"__train_model(self, train_loader, valid_loader, test_loader, train_size, valid_size, show, filename): self.model.first_time = time.time() for epoch",
"+ str(epoch) + \"/\" + str(self.num_epoch) self.model.train(epoch, train_loader, train_size) self.model.validation(valid_loader, valid_size) print('Data train:",
"{:.2f}'.format(acc)+'.pkl' torch.save({ 'model': model.model.state_dict(), 'standard': standard }, filename) def create_model(self, times, show): self.file_accucary",
"xavier.constants.type import Type from xavier.core.dataLoader import DataLoader from xavier.core.dataset import Dataset from xavier.core.training",
"train_size, valid_size def save_model(self, type, path_model, model, acc, standard): # Save the Trained",
"in range(1, self.num_epoch+1): print \"Epoch \" + str(epoch) + \"/\" + str(self.num_epoch) self.model.train(epoch,",
"-*- coding: utf-8 -*- import os import time import numpy as np import",
"in dataset]) dataset_y = np.asarray([l[0] for l in dataset]) return dataset_x, dataset_y def",
"model = model.to(self.device) model_training = Training( model, criterion, optimizer, self.device) return model_training def",
"class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch, batch_size, input_layer=0, hidden_layer=0, output_layer=3,",
"\" + str(file) + \"\\n\" dataset_x, dataset_y = self.get_dataset(file) X_train, X_test, y_train, y_test",
"output_layer self.matriz_size = matriz_size self.device = device self.filenames = filenames self.matriz_size = matriz_size",
"== Type.mlp: train_data = Dataset(X_train, y_train, Type.mlp) test_data = Dataset(X_test, y_test, Type.mlp) elif",
"matriz_size=0, type=Type.mlp): self.input_layer = input_layer self.hidden_layer = hidden_layer self.output_layer = output_layer self.matriz_size =",
"= init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard elif self.type ==",
"self.type == Type.rnn: model = Rnn(self.device).to(self.device) print(model) elif self.type == Type.cnn: model =",
"learning_rate self.filenames_models = filenames_models self.num_epoch = num_epoch self.file_accucary = np.zeros(len(self.filenames)) self.version = config.VERSION",
"y_test, self.matriz_size) self.input_layer = X_train.shape[1] self.model = self.__build_model() path = self.filenames_models[idx] + \\",
"filenames, filenames_models, device, learning_rate, num_epoch, batch_size, input_layer=0, hidden_layer=0, output_layer=3, matriz_size=0, type=Type.mlp): self.input_layer =",
"print('Data train: ') self.model.validation(train_loader, train_size, train=True) print('Data Test: ') return self.model.test(test_loader, show, filename)",
"'standard': standard }, filename) def create_model(self, times, show): self.file_accucary = np.zeros(len(self.filenames)) for _",
"self.file_accucary = np.zeros(len(self.filenames)) self.version = config.VERSION def __train_model(self, train_loader, valid_loader, test_loader, train_size, valid_size,",
"np.zeros(len(self.filenames)) self.version = config.VERSION def __train_model(self, train_loader, valid_loader, test_loader, train_size, valid_size, show, filename):",
"self.type.value + '/'+self.version accuracy = self.__train_model(train_loader, valid_loader, test_loader, train_size, valid_size, show, filename=path) if",
"valid_size = dataLoader.get_train( train_data, batch_size) test_loader = dataLoader.get_test(test_data, batch_size) return train_loader, valid_loader, test_loader,",
"matriz_size self.device = device self.filenames = filenames self.matriz_size = matriz_size self.type = type",
"train_loader, valid_loader, test_loader, train_size, valid_size def save_model(self, type, path_model, model, acc, standard): #",
"self.filenames_models = filenames_models self.num_epoch = num_epoch self.file_accucary = np.zeros(len(self.filenames)) self.version = config.VERSION def",
"standard) return X_train, X_test, standard elif self.type == Type.cnn: X_train, standard = init_standard(X_train)",
"= self.get_normalization( X_train, X_test) train_loader, valid_loader, test_loader, train_size, valid_size = self.get_loader( self.type, self.batch_size,",
"== Type.cnn: model = Cnn(self.device).to(self.device) # choose optimizer and loss function criterion =",
"batch_size) return train_loader, valid_loader, test_loader, train_size, valid_size def save_model(self, type, path_model, model, acc,",
"path = self.filenames_models[idx] + \\ self.type.value + '/'+self.version accuracy = self.__train_model(train_loader, valid_loader, test_loader,",
"self.model.validation(train_loader, train_size, train=True) print('Data Test: ') return self.model.test(test_loader, show, filename) def __build_model(self): #",
"= dataLoader.get_test(test_data, batch_size) return train_loader, valid_loader, test_loader, train_size, valid_size def save_model(self, type, path_model,",
"matriz_size) dataLoader = DataLoader() train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data, batch_size) test_loader",
"test_loader, train_size, valid_size def save_model(self, type, path_model, model, acc, standard): # Save the",
"build Model if self.type == Type.mlp: model = Mlp(self.device).to(self.device) elif self.type == Type.rnn:",
"from xavier.net.mlp import Mlp from xavier.net.rnn_lstm_2 import Rnn torch.manual_seed(1234) class Model(object): def __init__(self,",
"self.num_epoch+1): print \"Epoch \" + str(epoch) + \"/\" + str(self.num_epoch) self.model.train(epoch, train_loader, train_size)",
"= Dataset(X_train, y_train, Type.cnn, matriz_size) test_data = Dataset(X_test, y_test, Type.cnn, matriz_size) dataLoader =",
"Training from xavier.core.transformation import get_standard, init_standard from xavier.net.cnn import Cnn from xavier.net.mlp import",
"self.num_epoch = num_epoch self.file_accucary = np.zeros(len(self.filenames)) self.version = config.VERSION def __train_model(self, train_loader, valid_loader,",
"self.model.test(test_loader, show, filename) def __build_model(self): # build Model if self.type == Type.mlp: model",
"train_size, valid_size, show, filename=path) if accuracy > self.file_accucary[idx]: self.save_model(self.type, self.filenames_models[idx], self.model, accuracy, standard)",
"+ \"\\n\" dataset_x, dataset_y = self.get_dataset(file) X_train, X_test, y_train, y_test = train_test_split( dataset_x,",
"y_test, Type.mlp) elif type == Type.rnn: train_data = Dataset(X_train, y_train, Type.rnn, matriz_size) test_data",
"X_test, y_test, self.matriz_size) self.input_layer = X_train.shape[1] self.model = self.__build_model() path = self.filenames_models[idx] +",
"xavier.core.dataLoader import DataLoader from xavier.core.dataset import Dataset from xavier.core.training import Training from xavier.core.transformation",
"DataLoader() train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data, batch_size) test_loader = dataLoader.get_test(test_data, batch_size)",
"valid_size def save_model(self, type, path_model, model, acc, standard): # Save the Trained Model",
"from xavier.net.rnn_lstm_2 import Rnn torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate,",
"init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard elif self.type == Type.cnn:",
"y_test, Type.cnn, matriz_size) dataLoader = DataLoader() train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data,",
"show): self.file_accucary = np.zeros(len(self.filenames)) for _ in range(times): for idx, file in enumerate(self.filenames):",
"model = Mlp(self.device).to(self.device) elif self.type == Type.rnn: model = Rnn(self.device).to(self.device) print(model) elif self.type",
"Model if self.type == Type.mlp: model = Mlp(self.device).to(self.device) elif self.type == Type.rnn: model",
"type == Type.mlp: train_data = Dataset(X_train, y_train, Type.mlp) test_data = Dataset(X_test, y_test, Type.mlp)",
"train_loader, valid_loader, train_size, valid_size = dataLoader.get_train( train_data, batch_size) test_loader = dataLoader.get_test(test_data, batch_size) return",
"X_test = get_standard(X_test, standard) return X_train, X_test, standard def get_loader(self, type, batch_size, X_train,",
"np import torch import torch.optim as optim from sklearn.model_selection import train_test_split from torch",
"+ '/'+self.version+' {:.2f}'.format(acc)+'.pkl' torch.save({ 'model': model.model.state_dict(), 'standard': standard }, filename) def create_model(self, times,",
"import Dataset from xavier.core.training import Training from xavier.core.transformation import get_standard, init_standard from xavier.net.cnn",
"= device self.filenames = filenames self.matriz_size = matriz_size self.type = type self.batch_size =",
"filename) def create_model(self, times, show): self.file_accucary = np.zeros(len(self.filenames)) for _ in range(times): for",
"init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard def get_loader(self, type, batch_size,",
"#!/usr/bin/env python # -*- coding: utf-8 -*- import os import time import numpy",
"get_standard, init_standard from xavier.net.cnn import Cnn from xavier.net.mlp import Mlp from xavier.net.rnn_lstm_2 import",
"delimiter=',', dtype=np.float64) dataset_x = np.asarray([l[1:] for l in dataset]) dataset_y = np.asarray([l[0] for",
"Type.mlp) elif type == Type.rnn: train_data = Dataset(X_train, y_train, Type.rnn, matriz_size) test_data =",
"input_layer=0, hidden_layer=0, output_layer=3, matriz_size=0, type=Type.mlp): self.input_layer = input_layer self.hidden_layer = hidden_layer self.output_layer =",
"Type.cnn: train_data = Dataset(X_train, y_train, Type.cnn, matriz_size) test_data = Dataset(X_test, y_test, Type.cnn, matriz_size)",
"for _ in range(times): for idx, file in enumerate(self.filenames): print \"\\nTraining dataset: \"",
"') return self.model.test(test_loader, show, filename) def __build_model(self): # build Model if self.type ==",
"train_test_split( dataset_x, dataset_y, test_size=0.2, random_state=21) X_train, X_test, standard = self.get_normalization( X_train, X_test) train_loader,",
"show, filename=path) if accuracy > self.file_accucary[idx]: self.save_model(self.type, self.filenames_models[idx], self.model, accuracy, standard) self.file_accucary[idx] =",
"'model': model.model.state_dict(), 'standard': standard }, filename) def create_model(self, times, show): self.file_accucary = np.zeros(len(self.filenames))",
"self.filenames = filenames self.matriz_size = matriz_size self.type = type self.batch_size = batch_size self.learning_rate",
"X_train, X_test): if self.type == Type.mlp: X_train, standard = init_standard(X_train) X_test = get_standard(X_test,",
"= learning_rate self.filenames_models = filenames_models self.num_epoch = num_epoch self.file_accucary = np.zeros(len(self.filenames)) self.version =",
"Type.mlp: train_data = Dataset(X_train, y_train, Type.mlp) test_data = Dataset(X_test, y_test, Type.mlp) elif type",
"y_test, Type.rnn, matriz_size) elif type == Type.cnn: train_data = Dataset(X_train, y_train, Type.cnn, matriz_size)",
"choose optimizer and loss function criterion = nn.CrossEntropyLoss() optimizer = optim.Adam(model.parameters(), lr=self.learning_rate) model",
"str(epoch) + \"/\" + str(self.num_epoch) self.model.train(epoch, train_loader, train_size) self.model.validation(valid_loader, valid_size) print('Data train: ')",
"matriz_size) test_data = Dataset(X_test, y_test, Type.rnn, matriz_size) elif type == Type.cnn: train_data =",
"xavier.core.dataset import Dataset from xavier.core.training import Training from xavier.core.transformation import get_standard, init_standard from",
"-*- import os import time import numpy as np import torch import torch.optim",
"torch.manual_seed(1234) class Model(object): def __init__(self, filenames, filenames_models, device, learning_rate, num_epoch, batch_size, input_layer=0, hidden_layer=0,",
"init_standard(X_train) X_test = get_standard(X_test, standard) return X_train, X_test, standard elif self.type == Type.rnn:"
]
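A minimal usage sketch for the Model class above. The folder paths, hyperparameters, and device choice are illustrative assumptions, not values taken from the original xavier project.

# Hypothetical driver for the Model class above; paths and hyperparameters are placeholders.
import torch
from xavier.constants.type import Type

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

model = Model(
    filenames=['data/subject_01/'],           # each entry is a folder containing dataset.csv
    filenames_models=['models/subject_01/'],  # prefix under which the best .pkl is saved
    device=device,
    learning_rate=1e-3,
    num_epoch=50,
    batch_size=32,
    output_layer=3,
    matriz_size=10,
    type=Type.mlp,
)

# runs `times` passes over every dataset and keeps the weights of the best-scoring run
model.create_model(times=3, show=False)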
[
"input, the logic in the # strategy is slightly different in this case",
"are closer to the initial value for x in range(0, 3): self.assertTrue(0.24 <",
"are closer to the target one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]):",
"class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0",
"self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we",
"= 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent =",
"few samples are closer to the initial value for x in range(0, 3):",
"in the # strategy is slightly different in this case for x in",
"[] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples are closer",
"import unittest from typing import List import numpy as np from py_headless_daw.processing.stream.stream_gain import",
"numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event",
"# the first few samples are closer to the initial value for x",
"while the last few are closer to the target one for x in",
"3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) #",
"dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event]",
"py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event",
"x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the last",
"in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the last few",
"in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer],",
"[in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render without any events in the",
"py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy =",
"samples are closer to the initial value for x in range(0, 3): self.assertTrue(0.24",
"the last few are closer to the target one for x in range(out_stream_buffer.shape[0]",
"we render without any events in the input, the logic in the #",
"= [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the",
"0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render without any events",
"range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the last few are",
"> 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render without any",
"1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0,",
"strategy is slightly different in this case for x in range(out_stream_buffer.shape[0] - 3,",
"out_stream_buffer[x] < 0.26) # while the last few are closer to the target",
"np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer:",
"[input_event_buffer], [output_event_buffer]) # the first few samples are closer to the initial value",
"import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25))",
"ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars =",
"[[]], [[]]) # now we render without any events in the input, the",
"the # strategy is slightly different in this case for x in range(out_stream_buffer.shape[0]",
"TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self):",
"List import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval",
"for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval,",
"target one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] >",
"import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars",
"to the initial value for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] <",
"np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event",
"interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32)",
"[in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples are closer to the",
"[[]]) # now we render without any events in the input, the logic",
"the initial value for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26)",
"now we render without any events in the input, the logic in the",
"value for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while",
"closer to the initial value for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x]",
"List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples",
"interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent",
"for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the",
"in this case for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(out_stream_buffer[x] > 0.45)",
"# now we render without any events in the input, the logic in",
"any events in the input, the logic in the # strategy is slightly",
"one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45)",
"# strategy is slightly different in this case for x in range(out_stream_buffer.shape[0] -",
"Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval",
"StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer],",
"last few are closer to the target one for x in range(out_stream_buffer.shape[0] -",
"setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] =",
"without any events in the input, the logic in the # strategy is",
"[out_stream_buffer], [[]], [[]]) # now we render without any events in the input,",
"first few samples are closer to the initial value for x in range(0,",
"from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from",
"output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few",
"strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer",
"in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN,",
"range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]],",
"0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event:",
"import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from",
"as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import",
"[setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first",
"different in this case for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(out_stream_buffer[x] >",
"= [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples are",
"import StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import",
"[out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples are closer to the initial",
"from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class",
"0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer],",
"out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] =",
"slightly different in this case for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(out_stream_buffer[x]",
"< out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render",
"3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the last few are closer",
"= np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55)",
"events in the input, the logic in the # strategy is slightly different",
"= TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer",
"StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars",
"interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,),",
"few are closer to the target one for x in range(out_stream_buffer.shape[0] - 3,",
"closer to the target one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24",
"from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval =",
"in the input, the logic in the # strategy is slightly different in",
"= 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32)",
"< out_stream_buffer[x] < 0.26) # while the last few are closer to the",
"ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = []",
"the first few samples are closer to the initial value for x in",
"[output_event_buffer]) # the first few samples are closer to the initial value for",
"import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def",
"py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval()",
"out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now",
"x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer],",
"the logic in the # strategy is slightly different in this case for",
"py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase):",
"render without any events in the input, the logic in the # strategy",
"0.26) # while the last few are closer to the target one for",
"out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render without",
"= StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer =",
"unittest from typing import List import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain",
"to the target one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 <",
"the input, the logic in the # strategy is slightly different in this",
"test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1",
"self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) # while the last few are closer to",
"import List import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval import",
"typing import List import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from py_headless_daw.schema.dto.time_interval",
"strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) # the first few samples are closer to",
"logic in the # strategy is slightly different in this case for x",
"# while the last few are closer to the target one for x",
"= np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event]",
"from typing import List import numpy as np from py_headless_daw.processing.stream.stream_gain import StreamGain from",
"StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,),",
"from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent class StreamGainStrategyTest(unittest.TestCase): def test_stream_gain_strategy(self): strategy",
"np.ones(shape=(100,), dtype=np.float32) out_stream_buffer = np.zeros(shape=(100,), dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer:",
"dtype=np.float32) setter_event: ParameterValueEvent = ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event]",
"input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer])",
"< 0.26) # while the last few are closer to the target one",
"TimeInterval() interval.start_in_bars = 0 interval.end_in_bars = 1 in_stream_buffer = np.ones(shape=(100,), dtype=np.float32) out_stream_buffer =",
"StreamGain from py_headless_daw.schema.dto.time_interval import TimeInterval from py_headless_daw.schema.events.event import Event from py_headless_daw.schema.events.parameter_value_event import ParameterValueEvent",
"- 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x] > 0.45) strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]])",
"ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer],",
"initial value for x in range(0, 3): self.assertTrue(0.24 < out_stream_buffer[x] < 0.26) #",
"List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [input_event_buffer], [output_event_buffer]) #",
"the target one for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]): self.assertTrue(0.24 < out_stream_buffer[x]",
"is slightly different in this case for x in range(out_stream_buffer.shape[0] - 3, out_stream_buffer.shape[0]):",
"strategy.render(interval, [in_stream_buffer], [out_stream_buffer], [[]], [[]]) # now we render without any events in",
"def test_stream_gain_strategy(self): strategy = StreamGain(np.float32(0.25)) interval = TimeInterval() interval.start_in_bars = 0 interval.end_in_bars =",
"= ParameterValueEvent(0, StreamGain.PARAMETER_GAIN, 0.55) input_event_buffer: List[Event] = [setter_event] output_event_buffer: List[Event] = [] strategy.render(interval,"
]
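The assertions in the test above describe the expected behaviour: after a gain-parameter event the output ramps from the initial gain (0.25) towards the target (0.55) across the buffer, and a later render without events holds the new gain. Below is a standalone NumPy sketch of such a linear gain ramp; it only illustrates the behaviour being asserted and is not the actual StreamGain implementation from py_headless_daw.

import numpy as np


def apply_gain_ramp(in_buffer: np.ndarray, current_gain: float, target_gain: float) -> np.ndarray:
    # Linearly interpolate the gain from current_gain to target_gain across the buffer.
    # Illustrative sketch only; the real strategy may interpolate differently.
    ramp = np.linspace(current_gain, target_gain, num=in_buffer.shape[0], dtype=in_buffer.dtype)
    return in_buffer * ramp


in_buffer = np.ones(shape=(100,), dtype=np.float32)
out_buffer = apply_gain_ramp(in_buffer, current_gain=0.25, target_gain=0.55)
assert 0.24 < out_buffer[0] < 0.26   # first samples stay near the initial gain
assert out_buffer[-1] > 0.45         # last samples approach the target gain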
[
"def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html')",
"import user_passes_test # Create your views here. def not_logged_in(user): return not user.is_authenticated def",
"render from django.contrib.auth.decorators import user_passes_test # Create your views here. def not_logged_in(user): return",
"return not user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html')",
"return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html') def rules(request):",
"def not_logged_in(user): return not user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request): return",
"return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html')",
"user_passes_test # Create your views here. def not_logged_in(user): return not user.is_authenticated def base(request):",
"here. def not_logged_in(user): return not user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request):",
"home(request): return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def",
"render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html') def rules(request): return",
"from django.shortcuts import render from django.contrib.auth.decorators import user_passes_test # Create your views here.",
"def home(request): return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None)",
"render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return",
"Create your views here. def not_logged_in(user): return not user.is_authenticated def base(request): return render(request,",
"your views here. def not_logged_in(user): return not user.is_authenticated def base(request): return render(request, 'home/base.html')",
"import render from django.contrib.auth.decorators import user_passes_test # Create your views here. def not_logged_in(user):",
"'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request,",
"<reponame>sa-y-an/Qriosity2.0<gh_stars>0 from django.shortcuts import render from django.contrib.auth.decorators import user_passes_test # Create your views",
"# Create your views here. def not_logged_in(user): return not user.is_authenticated def base(request): return",
"user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request):",
"base(request): return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request): return render(request,",
"render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in,",
"'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard',",
"django.contrib.auth.decorators import user_passes_test # Create your views here. def not_logged_in(user): return not user.is_authenticated",
"@user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html') def rules(request): return render(request, 'home/rule.html')",
"hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html') def",
"'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request): return render(request, 'home/login.html') def rules(request): return render(request,",
"django.shortcuts import render from django.contrib.auth.decorators import user_passes_test # Create your views here. def",
"from django.contrib.auth.decorators import user_passes_test # Create your views here. def not_logged_in(user): return not",
"not user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def",
"views here. def not_logged_in(user): return not user.is_authenticated def base(request): return render(request, 'home/base.html') def",
"def base(request): return render(request, 'home/base.html') def home(request): return render(request, 'home/home.html') def hello(request): return",
"not_logged_in(user): return not user.is_authenticated def base(request): return render(request, 'home/base.html') def home(request): return render(request,",
"return render(request, 'home/home.html') def hello(request): return render(request, 'home/hello.html') @user_passes_test(not_logged_in, login_url='/user/dashboard', redirect_field_name=None) def login(request):"
]
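A minimal urls.py sketch that could wire up the views above; the route paths and names are illustrative assumptions, not taken from the Qriosity2.0 project.

# Hypothetical URL configuration for the views above.
from django.urls import path

from . import views

urlpatterns = [
    path('', views.home, name='home'),
    path('base/', views.base, name='base'),
    path('hello/', views.hello, name='hello'),
    path('login/', views.login, name='login'),
    path('rules/', views.rules, name='rules'),
]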
[
"tuple): ''' Inserts a new record in warehouse table Parameters ---------- data_tuple: tuple",
"cursor cursor.close() def update(self, id: str, new_data: tuple): ''' Updates a record of",
"FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit()",
"Deletes a record from warehouse table Parameters ---------- id: str ''' # aquiring",
"in the db data_tuple: tuple tuple of new values (name, location) ''' #",
"warehouse table Parameters ---------- data_tuple: tuple tuple of values (name, location) ''' #",
"= ''' DELETE FROM warehouse WHERE id = ? ''' # executing script",
"cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self, id: str): ''' gets",
"(*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self, id: str):",
"order: str Default = 'asc' arrangement of the returned query ASC: ascending order",
"# conceding cursor cursor.close() return id def delete(self, id: str): ''' Deletes a",
"return query def getAll(self, order: str = 'ASC'): ''' gets a record from",
"from models import BaseModel, gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table with",
"= self.conn.cursor() # sql script sql_script = ''' UPDATE warehouse SET name =",
"sql_script = f''' SELECT * FROM warehouse ORDER BY name {order} ''' #",
"''' UPDATE warehouse SET name = ? , location = ? WHERE id",
"FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) query",
"''' # executing script cursor.execute(sql_script) query = cursor.fetchall() # conceding cursor cursor.close() return",
"= cursor.fetchone() # conceding cursor cursor.close() return query def getAll(self, order: str =",
"cursor.close() return id def delete(self, id: str): ''' Deletes a record from warehouse",
"order Returns ------- query: list results list ''' # aquiring cursor cursor =",
"'asc' arrangement of the returned query ASC: ascending order DESC: descending order Returns",
"def delete(self, id: str): ''' Deletes a record from warehouse table Parameters ----------",
"conceding cursor cursor.close() def getByID(self, id: str): ''' gets a record from the",
"Parameters ---------- id: str ''' # aquiring cursor cursor = self.conn.cursor() # sql",
"cursor cursor = self.conn.cursor() # sql script sql_script = ''' SELECT * FROM",
"def update(self, id: str, new_data: tuple): ''' Updates a record of the warehouse",
"a record of the warehouse table using id Parameters ---------- id: str id",
"cursor cursor = self.conn.cursor() # sql script sql_script = ''' DELETE FROM warehouse",
"warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT NULL, location TEXT",
"id = ? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor",
"VALUES (?, ?, ?) ''' # executing script id = gen_id() data_tuple =",
"cursor = self.conn.cursor() # sql script sql_script = ''' DELETE FROM warehouse WHERE",
"id Parameters ---------- id: str id of the record in the db Returns",
"the warehouse table using id Parameters ---------- id: str id of the record",
"warehouse table using id Parameters ---------- order: str Default = 'asc' arrangement of",
"---------- data_tuple: tuple tuple of values (name, location) ''' # aquiring cursor cursor",
"the db Returns ------- query: tuple represents the result ''' # aquiring cursor",
"DELETE FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,))",
"script sql_script = f''' SELECT * FROM warehouse ORDER BY name {order} '''",
"self.conn.commit() # conceding cursor cursor.close() return id def delete(self, id: str): ''' Deletes",
"name {order} ''' # executing script cursor.execute(sql_script) query = cursor.fetchall() # conceding cursor",
"id = ? ''' # executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() #",
"# aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' UPDATE",
"self.conn.cursor() # sql script sql_script = ''' INSERT INTO warehouse VALUES (?, ?,",
"NULL PRIMARY KEY, name TEXT NOT NULL, location TEXT NOT NULL ); '''",
"for Warehouse table with the following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY",
"self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a new record in warehouse table",
"# aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' INSERT",
"values (name, location) ''' # aquiring cursor cursor = self.conn.cursor() # sql script",
"''' INSERT INTO warehouse VALUES (?, ?, ?) ''' # executing script id",
"SELECT * FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script,",
"new values (name, location) ''' # aquiring cursor cursor = self.conn.cursor() # sql",
"(id,)) self.conn.commit() # conceding cursor cursor.close() def update(self, id: str, new_data: tuple): '''",
"---------- id: str ''' # aquiring cursor cursor = self.conn.cursor() # sql script",
"the db data_tuple: tuple tuple of new values (name, location) ''' # aquiring",
"of the returned query ASC: ascending order DESC: descending order Returns ------- query:",
"= self.conn.cursor() # sql script sql_script = ''' DELETE FROM warehouse WHERE id",
"db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a new record",
"? , location = ? WHERE id = ? ''' # executing script",
"in the db Returns ------- query: tuple represents the result ''' # aquiring",
"*data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return id def delete(self, id:",
"cursor cursor.close() return id def delete(self, id: str): ''' Deletes a record from",
"'ASC'): ''' gets a record from the warehouse table using id Parameters ----------",
"cursor = self.conn.cursor() # sql script sql_script = ''' INSERT INTO warehouse VALUES",
"aquiring cursor cursor = self.conn.cursor() # sql script sql_script = f''' SELECT *",
"''' DELETE FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script,",
"from the warehouse table using id Parameters ---------- order: str Default = 'asc'",
"warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() #",
"data_tuple: tuple tuple of new values (name, location) ''' # aquiring cursor cursor",
"getAll(self, order: str = 'ASC'): ''' gets a record from the warehouse table",
"''' gets a record from the warehouse table using id Parameters ---------- id:",
"class WarehouseModel(BaseModel): ''' ORM for Warehouse table with the following structure warehouse( id",
"script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def update(self, id: str, new_data:",
"# aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' DELETE",
"# executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor cursor.close() return",
"---------- id: str id of the record in the db Returns ------- query:",
"table using id Parameters ---------- id: str id of the record in the",
"''' Deletes a record from warehouse table Parameters ---------- id: str ''' #",
"with the following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT",
"cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor cursor.close() return query def getAll(self,",
"data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return id",
"cursor = self.conn.cursor() # sql script sql_script = ''' UPDATE warehouse SET name",
"= 'asc' arrangement of the returned query ASC: ascending order DESC: descending order",
"delete(self, id: str): ''' Deletes a record from warehouse table Parameters ---------- id:",
"the record in the db Returns ------- query: tuple represents the result '''",
"script sql_script = ''' SELECT * FROM warehouse WHERE id = ? '''",
"''' # executing script id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple)",
"# conceding cursor cursor.close() return query def getAll(self, order: str = 'ASC'): '''",
"?, ?) ''' # executing script id = gen_id() data_tuple = (id, *data_tuple)",
"name TEXT NOT NULL, location TEXT NOT NULL ); ''' def __init__(self, db_filename:",
"from the warehouse table using id Parameters ---------- id: str id of the",
"WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone()",
"''' ORM for Warehouse table with the following structure warehouse( id CHARACTER(10) NOT",
"); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): '''",
"warehouse ORDER BY name {order} ''' # executing script cursor.execute(sql_script) query = cursor.fetchall()",
"id = ? ''' # executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data)",
"= ''' INSERT INTO warehouse VALUES (?, ?, ?) ''' # executing script",
"# executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def update(self, id:",
"= ? ''' # executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding",
"TEXT NOT NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self,",
"ASC: ascending order DESC: descending order Returns ------- query: list results list '''",
"location TEXT NOT NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def",
"record in the db Returns ------- query: tuple represents the result ''' #",
"# sql script sql_script = ''' UPDATE warehouse SET name = ? ,",
"''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts",
"result ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script =",
"WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding",
"cursor.fetchone() # conceding cursor cursor.close() return query def getAll(self, order: str = 'ASC'):",
"the returned query ASC: ascending order DESC: descending order Returns ------- query: list",
"conceding cursor cursor.close() return id def delete(self, id: str): ''' Deletes a record",
"DESC: descending order Returns ------- query: list results list ''' # aquiring cursor",
"<filename>backend/src/models/Warehouse.py from models import BaseModel, gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table",
"script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def",
"# sql script sql_script = ''' INSERT INTO warehouse VALUES (?, ?, ?)",
"warehouse SET name = ? , location = ? WHERE id = ?",
"script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor cursor.close() return query def",
"tuple of values (name, location) ''' # aquiring cursor cursor = self.conn.cursor() #",
"self.conn.cursor() # sql script sql_script = ''' DELETE FROM warehouse WHERE id =",
"# sql script sql_script = ''' SELECT * FROM warehouse WHERE id =",
"following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT NULL,",
"# conceding cursor cursor.close() def getByID(self, id: str): ''' gets a record from",
"return id def delete(self, id: str): ''' Deletes a record from warehouse table",
"str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a new record in",
"* FROM warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,))",
"data_tuple) self.conn.commit() # conceding cursor cursor.close() return id def delete(self, id: str): '''",
"FROM warehouse ORDER BY name {order} ''' # executing script cursor.execute(sql_script) query =",
"NULL, location TEXT NOT NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename)",
"NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple):",
"= (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self, id:",
"record from the warehouse table using id Parameters ---------- id: str id of",
"= 'ASC'): ''' gets a record from the warehouse table using id Parameters",
"# sql script sql_script = ''' DELETE FROM warehouse WHERE id = ?",
"represents the result ''' # aquiring cursor cursor = self.conn.cursor() # sql script",
"a record from the warehouse table using id Parameters ---------- order: str Default",
"PRIMARY KEY, name TEXT NOT NULL, location TEXT NOT NULL ); ''' def",
"cursor cursor = self.conn.cursor() # sql script sql_script = ''' UPDATE warehouse SET",
"---------- id: str id of the record in the db data_tuple: tuple tuple",
"INSERT INTO warehouse VALUES (?, ?, ?) ''' # executing script id =",
"arrangement of the returned query ASC: ascending order DESC: descending order Returns -------",
"= f''' SELECT * FROM warehouse ORDER BY name {order} ''' # executing",
"Warehouse table with the following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY,",
"sql script sql_script = ''' SELECT * FROM warehouse WHERE id = ?",
"gets a record from the warehouse table using id Parameters ---------- order: str",
"query def getAll(self, order: str = 'ASC'): ''' gets a record from the",
"Parameters ---------- id: str id of the record in the db data_tuple: tuple",
"str = 'ASC'): ''' gets a record from the warehouse table using id",
"Parameters ---------- data_tuple: tuple tuple of values (name, location) ''' # aquiring cursor",
"sql script sql_script = ''' DELETE FROM warehouse WHERE id = ? '''",
"---------- order: str Default = 'asc' arrangement of the returned query ASC: ascending",
"new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self, id: str): ''' gets a",
"ORM for Warehouse table with the following structure warehouse( id CHARACTER(10) NOT NULL",
"a new record in warehouse table Parameters ---------- data_tuple: tuple tuple of values",
"# aquiring cursor cursor = self.conn.cursor() # sql script sql_script = f''' SELECT",
"models import BaseModel, gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table with the",
"the result ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script",
"Parameters ---------- order: str Default = 'asc' arrangement of the returned query ASC:",
"insert(self, data_tuple: tuple): ''' Inserts a new record in warehouse table Parameters ----------",
"aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' UPDATE warehouse",
"executing script id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() #",
"location) ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script =",
"def getAll(self, order: str = 'ASC'): ''' gets a record from the warehouse",
"conceding cursor cursor.close() def update(self, id: str, new_data: tuple): ''' Updates a record",
"gets a record from the warehouse table using id Parameters ---------- id: str",
"id: str id of the record in the db Returns ------- query: tuple",
"warehouse WHERE id = ? ''' # executing script cursor.execute(sql_script, (id,)) query =",
"cursor = self.conn.cursor() # sql script sql_script = ''' SELECT * FROM warehouse",
"INTO warehouse VALUES (?, ?, ?) ''' # executing script id = gen_id()",
"str Default = 'asc' arrangement of the returned query ASC: ascending order DESC:",
"self.conn.cursor() # sql script sql_script = ''' UPDATE warehouse SET name = ?",
"self.conn.commit() # conceding cursor cursor.close() def update(self, id: str, new_data: tuple): ''' Updates",
"id Parameters ---------- order: str Default = 'asc' arrangement of the returned query",
"= self.conn.cursor() # sql script sql_script = f''' SELECT * FROM warehouse ORDER",
"(id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return id def delete(self,",
"a record from the warehouse table using id Parameters ---------- id: str id",
"(name, location) ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script",
"name = ? , location = ? WHERE id = ? ''' #",
"sql script sql_script = ''' UPDATE warehouse SET name = ? , location",
"aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' INSERT INTO",
"new record in warehouse table Parameters ---------- data_tuple: tuple tuple of values (name,",
"query = cursor.fetchone() # conceding cursor cursor.close() return query def getAll(self, order: str",
"f''' SELECT * FROM warehouse ORDER BY name {order} ''' # executing script",
"aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' SELECT *",
"= self.conn.cursor() # sql script sql_script = ''' INSERT INTO warehouse VALUES (?,",
"record from the warehouse table using id Parameters ---------- order: str Default =",
"id of the record in the db Returns ------- query: tuple represents the",
"cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return id def delete(self, id: str):",
"? ''' # executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() #",
"cursor = self.conn.cursor() # sql script sql_script = f''' SELECT * FROM warehouse",
"# executing script id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit()",
"# aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' SELECT",
"cursor cursor = self.conn.cursor() # sql script sql_script = ''' INSERT INTO warehouse",
"(id,)) query = cursor.fetchone() # conceding cursor cursor.close() return query def getAll(self, order:",
"NOT NULL PRIMARY KEY, name TEXT NOT NULL, location TEXT NOT NULL );",
"id def delete(self, id: str): ''' Deletes a record from warehouse table Parameters",
"sql script sql_script = ''' INSERT INTO warehouse VALUES (?, ?, ?) '''",
"id: str ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script",
"super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a new record in warehouse",
"tuple): ''' Updates a record of the warehouse table using id Parameters ----------",
"str): ''' Deletes a record from warehouse table Parameters ---------- id: str '''",
"cursor.close() return query def getAll(self, order: str = 'ASC'): ''' gets a record",
"import BaseModel, gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table with the following",
"cursor cursor = self.conn.cursor() # sql script sql_script = f''' SELECT * FROM",
"cursor cursor.close() return query def getAll(self, order: str = 'ASC'): ''' gets a",
"str id of the record in the db data_tuple: tuple tuple of new",
"new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self,",
"Returns ------- query: tuple represents the result ''' # aquiring cursor cursor =",
"query: tuple represents the result ''' # aquiring cursor cursor = self.conn.cursor() #",
"Updates a record of the warehouse table using id Parameters ---------- id: str",
"returned query ASC: ascending order DESC: descending order Returns ------- query: list results",
"script id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding",
"cursor.close() def update(self, id: str, new_data: tuple): ''' Updates a record of the",
"KEY, name TEXT NOT NULL, location TEXT NOT NULL ); ''' def __init__(self,",
"of the record in the db Returns ------- query: tuple represents the result",
"sql_script = ''' UPDATE warehouse SET name = ? , location = ?",
"= gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close()",
"? ''' # executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor",
"# executing script cursor.execute(sql_script) query = cursor.fetchall() # conceding cursor cursor.close() return query",
"order: str = 'ASC'): ''' gets a record from the warehouse table using",
"CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT NULL, location TEXT NOT NULL",
"id: str): ''' gets a record from the warehouse table using id Parameters",
"* FROM warehouse ORDER BY name {order} ''' # executing script cursor.execute(sql_script) query",
"script sql_script = ''' DELETE FROM warehouse WHERE id = ? ''' #",
"= self.conn.cursor() # sql script sql_script = ''' SELECT * FROM warehouse WHERE",
"WarehouseModel(BaseModel): ''' ORM for Warehouse table with the following structure warehouse( id CHARACTER(10)",
", location = ? WHERE id = ? ''' # executing script new_data",
"Default = 'asc' arrangement of the returned query ASC: ascending order DESC: descending",
"record of the warehouse table using id Parameters ---------- id: str id of",
"in warehouse table Parameters ---------- data_tuple: tuple tuple of values (name, location) '''",
"? WHERE id = ? ''' # executing script new_data = (*new_data, id)",
"list ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script =",
"data_tuple: tuple tuple of values (name, location) ''' # aquiring cursor cursor =",
"tuple represents the result ''' # aquiring cursor cursor = self.conn.cursor() # sql",
"TEXT NOT NULL, location TEXT NOT NULL ); ''' def __init__(self, db_filename: str):",
"warehouse table using id Parameters ---------- id: str id of the record in",
"tuple tuple of new values (name, location) ''' # aquiring cursor cursor =",
"BaseModel, gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table with the following structure",
"sql_script = ''' DELETE FROM warehouse WHERE id = ? ''' # executing",
"''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script = f'''",
"sql script sql_script = f''' SELECT * FROM warehouse ORDER BY name {order}",
"a record from warehouse table Parameters ---------- id: str ''' # aquiring cursor",
"the record in the db data_tuple: tuple tuple of new values (name, location)",
"script sql_script = ''' INSERT INTO warehouse VALUES (?, ?, ?) ''' #",
"from warehouse table Parameters ---------- id: str ''' # aquiring cursor cursor =",
"id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor",
"NOT NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple:",
"descending order Returns ------- query: list results list ''' # aquiring cursor cursor",
"of values (name, location) ''' # aquiring cursor cursor = self.conn.cursor() # sql",
"def getByID(self, id: str): ''' gets a record from the warehouse table using",
"Parameters ---------- id: str id of the record in the db Returns -------",
"= ? , location = ? WHERE id = ? ''' # executing",
"results list ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script",
"executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close()",
"SELECT * FROM warehouse ORDER BY name {order} ''' # executing script cursor.execute(sql_script)",
"location = ? WHERE id = ? ''' # executing script new_data =",
"the following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT",
"NOT NULL, location TEXT NOT NULL ); ''' def __init__(self, db_filename: str): super(WarehouseModel,",
"query: list results list ''' # aquiring cursor cursor = self.conn.cursor() # sql",
"------- query: tuple represents the result ''' # aquiring cursor cursor = self.conn.cursor()",
"cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def update(self, id: str, new_data: tuple):",
"ascending order DESC: descending order Returns ------- query: list results list ''' #",
"self.conn.cursor() # sql script sql_script = f''' SELECT * FROM warehouse ORDER BY",
"Inserts a new record in warehouse table Parameters ---------- data_tuple: tuple tuple of",
"''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script = '''",
"?) ''' # executing script id = gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script,",
"id of the record in the db data_tuple: tuple tuple of new values",
"table using id Parameters ---------- order: str Default = 'asc' arrangement of the",
"SET name = ? , location = ? WHERE id = ? '''",
"id: str, new_data: tuple): ''' Updates a record of the warehouse table using",
"= ? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close()",
"# conceding cursor cursor.close() def update(self, id: str, new_data: tuple): ''' Updates a",
"script sql_script = ''' UPDATE warehouse SET name = ? , location =",
"ORDER BY name {order} ''' # executing script cursor.execute(sql_script) query = cursor.fetchall() #",
"of the record in the db data_tuple: tuple tuple of new values (name,",
"= ? ''' # executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit()",
"the warehouse table using id Parameters ---------- order: str Default = 'asc' arrangement",
"= (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return id def",
"self.conn.commit() # conceding cursor cursor.close() def getByID(self, id: str): ''' gets a record",
"__init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a new",
"db Returns ------- query: tuple represents the result ''' # aquiring cursor cursor",
"# executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor",
"conceding cursor cursor.close() return query def getAll(self, order: str = 'ASC'): ''' gets",
"using id Parameters ---------- order: str Default = 'asc' arrangement of the returned",
"update(self, id: str, new_data: tuple): ''' Updates a record of the warehouse table",
"id: str): ''' Deletes a record from warehouse table Parameters ---------- id: str",
"? ''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def",
"db data_tuple: tuple tuple of new values (name, location) ''' # aquiring cursor",
"gen_id() data_tuple = (id, *data_tuple) cursor.execute(sql_script, data_tuple) self.conn.commit() # conceding cursor cursor.close() return",
"record in the db data_tuple: tuple tuple of new values (name, location) '''",
"table Parameters ---------- data_tuple: tuple tuple of values (name, location) ''' # aquiring",
"str id of the record in the db Returns ------- query: tuple represents",
"id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding cursor cursor.close() def getByID(self, id: str): '''",
"UPDATE warehouse SET name = ? , location = ? WHERE id =",
"new_data: tuple): ''' Updates a record of the warehouse table using id Parameters",
"''' gets a record from the warehouse table using id Parameters ---------- order:",
"record from warehouse table Parameters ---------- id: str ''' # aquiring cursor cursor",
"order DESC: descending order Returns ------- query: list results list ''' # aquiring",
"str): ''' gets a record from the warehouse table using id Parameters ----------",
"= ? WHERE id = ? ''' # executing script new_data = (*new_data,",
"= ''' UPDATE warehouse SET name = ? , location = ? WHERE",
"str, new_data: tuple): ''' Updates a record of the warehouse table using id",
"tuple of new values (name, location) ''' # aquiring cursor cursor = self.conn.cursor()",
"getByID(self, id: str): ''' gets a record from the warehouse table using id",
"table Parameters ---------- id: str ''' # aquiring cursor cursor = self.conn.cursor() #",
"''' SELECT * FROM warehouse WHERE id = ? ''' # executing script",
"table with the following structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name",
"id Parameters ---------- id: str id of the record in the db data_tuple:",
"''' Updates a record of the warehouse table using id Parameters ---------- id:",
"of the warehouse table using id Parameters ---------- id: str id of the",
"warehouse table Parameters ---------- id: str ''' # aquiring cursor cursor = self.conn.cursor()",
"record in warehouse table Parameters ---------- data_tuple: tuple tuple of values (name, location)",
"of new values (name, location) ''' # aquiring cursor cursor = self.conn.cursor() #",
"= ''' SELECT * FROM warehouse WHERE id = ? ''' # executing",
"------- query: list results list ''' # aquiring cursor cursor = self.conn.cursor() #",
"gen_id class WarehouseModel(BaseModel): ''' ORM for Warehouse table with the following structure warehouse(",
"''' # executing script new_data = (*new_data, id) cursor.execute(sql_script, new_data) self.conn.commit() # conceding",
"data_tuple: tuple): ''' Inserts a new record in warehouse table Parameters ---------- data_tuple:",
"def insert(self, data_tuple: tuple): ''' Inserts a new record in warehouse table Parameters",
"WHERE id = ? ''' # executing script new_data = (*new_data, id) cursor.execute(sql_script,",
"# sql script sql_script = f''' SELECT * FROM warehouse ORDER BY name",
"id: str id of the record in the db data_tuple: tuple tuple of",
"sql_script = ''' SELECT * FROM warehouse WHERE id = ? ''' #",
"{order} ''' # executing script cursor.execute(sql_script) query = cursor.fetchall() # conceding cursor cursor.close()",
"cursor cursor.close() def getByID(self, id: str): ''' gets a record from the warehouse",
"warehouse VALUES (?, ?, ?) ''' # executing script id = gen_id() data_tuple",
"''' # executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor cursor.close()",
"aquiring cursor cursor = self.conn.cursor() # sql script sql_script = ''' DELETE FROM",
"list results list ''' # aquiring cursor cursor = self.conn.cursor() # sql script",
"sql_script = ''' INSERT INTO warehouse VALUES (?, ?, ?) ''' # executing",
"def __init__(self, db_filename: str): super(WarehouseModel, self).__init__(db_filename) def insert(self, data_tuple: tuple): ''' Inserts a",
"id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT NULL, location TEXT NOT",
"BY name {order} ''' # executing script cursor.execute(sql_script) query = cursor.fetchall() # conceding",
"executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def update(self, id: str,",
"executing script cursor.execute(sql_script, (id,)) query = cursor.fetchone() # conceding cursor cursor.close() return query",
"cursor.close() def getByID(self, id: str): ''' gets a record from the warehouse table",
"using id Parameters ---------- id: str id of the record in the db",
"(?, ?, ?) ''' # executing script id = gen_id() data_tuple = (id,",
"tuple tuple of values (name, location) ''' # aquiring cursor cursor = self.conn.cursor()",
"query ASC: ascending order DESC: descending order Returns ------- query: list results list",
"self.conn.cursor() # sql script sql_script = ''' SELECT * FROM warehouse WHERE id",
"''' # executing script cursor.execute(sql_script, (id,)) self.conn.commit() # conceding cursor cursor.close() def update(self,",
"Returns ------- query: list results list ''' # aquiring cursor cursor = self.conn.cursor()",
"''' Inserts a new record in warehouse table Parameters ---------- data_tuple: tuple tuple",
"str ''' # aquiring cursor cursor = self.conn.cursor() # sql script sql_script =",
"structure warehouse( id CHARACTER(10) NOT NULL PRIMARY KEY, name TEXT NOT NULL, location"
] |
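The class above depends on BaseModel and gen_id from a models package that is not shown here. What follows is a minimal sketch, purely an assumption about how those helpers might look (a sqlite3-backed base class and a 10-character id generator), together with a hypothetical usage example; the names and behavior are illustrative, not the project's actual implementation.

# Minimal sketch (assumption): a sqlite3-backed BaseModel and gen_id helper
# compatible with the WarehouseModel above. Not the project's real code.
import sqlite3
import uuid


def gen_id():
    # assumed: a 10-character id to match the CHARACTER(10) primary key
    return uuid.uuid4().hex[:10]


class BaseModel:
    def __init__(self, db_filename):
        # assumed: the base class only opens and stores the shared connection
        self.conn = sqlite3.connect(db_filename)


# Hypothetical usage, assuming the warehouse table already exists:
# model = WarehouseModel('inventory.db')
# new_id = model.insert(('Main depot', 'Springfield'))
# model.update(new_id, ('Main depot', 'Shelbyville'))
# print(model.getByID(new_id))
# print(model.getAll(order='DESC'))
# model.delete(new_id)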
import six
import os
import yaml
import logging
import logging.config

from appdirs import AppDirs
from pkg_resources import resource_filename


def setup_logging(log_level):
    log_config_file = os.path.join(resource_filename('ansibleroler', 'static'), 'config', 'logging.yml')
    level = logging.getLevelName(log_level)

    with open(log_config_file, 'rt') as f:
        log_config = yaml.safe_load(f.read())
    logging.config.dictConfig(log_config)

    if level:
        logging.getLogger("ansibleroler").setLevel(level)
    return


def update_log_level(log_level):
    level = logging.getLevelName(log_level)
    if level:
        logging.getLogger("ansibleroler").setLevel(level)
    return


def normalize_path(path):
    normalized = os.path.abspath(os.path.expanduser(path))
    return normalized


def convert_bool(obj):
    true_values = (True, 'True', 'true', 'yes', '1')
    false_values = (False, 'False', 'false', 'no', '0')

    if obj in true_values:
        return True
    elif obj in false_values:
        return False
    else:
        if not isinstance(obj, six.text_type):
            obj = six.text_type(obj, "utf-8")
        return obj


class Settings(object):
    def __init__(
        self,
        config_file=os.path.join(AppDirs("ansible-roler").user_config_dir, "config.ini"),
        role_name=None,
        base_path=os.getcwd(),
        log_level='WARNING',
        subdir_template=os.path.join(resource_filename('ansibleroler', 'static'), 'templates', 'main.yml.j2'),
        root_template=os.path.join(resource_filename('ansibleroler', 'static'), 'templates', '.drone.yml.j2'),
        exclude_subdirs=['templates', 'files', 'vars'],
        enable_templating=False,
        template_vars={}
    ):
        self.config_file = config_file
        self.role_name = role_name
        self.base_path = base_path
        self.log_level = log_level
        self.subdir_template = subdir_template
        self.root_template = root_template
        self.exclude_subdirs = exclude_subdirs
        self.enable_templating = enable_templating
        self.template_vars = template_vars
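A brief, hypothetical usage sketch of the helpers above. The import path is an assumption (the resource lookups suggest a module inside the ansibleroler package, but the exact location is not shown), and setup_logging additionally assumes the packaged static/config/logging.yml resource is installed.

# Hypothetical usage sketch; the import path is an assumption.
from ansibleroler.utils import setup_logging, update_log_level, convert_bool, normalize_path, Settings

setup_logging('INFO')                       # load packaged logging.yml, then set the level
update_log_level('DEBUG')                   # later adjust the 'ansibleroler' logger

print(convert_bool('yes'))                  # True
print(convert_bool('0'))                    # False
print(convert_bool(b'other'))               # decoded to text and returned unchanged
print(normalize_path('~/roles/../roles'))   # expanded, absolute path

settings = Settings(role_name='example-role')
print(settings.base_path, settings.exclude_subdirs)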
class SitemapFile:
    sitemap_file_name = None
    sitemap_contents = None
    sitemap_headers = None
    sitemap_source_type = None  # remote or local
    sitemap_file_error = None
    sitemap_received = False
    sitemap_lxml = None
    sitemap_plain_text = None
    sitemap_type = None  # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text,
                         # invalid_file_format

    def __init__(self, filename):
        self.sitemap_file_name = filename

    def set_remote_file_from_requests(self, result):
        self.sitemap_source_type = 'remote'
        self.sitemap_received = True
        self.sitemap_contents = result.text
        self.sitemap_headers = result.headers

    def set_local_file(self, file_contents):
        self.sitemap_source_type = 'local'
        self.sitemap_received = True
        self.sitemap_contents = file_contents

    def error_receiving_file(self, message):
        self.sitemap_file_error = message

    def error_receiving_remote_file(self, message):
        self.sitemap_source_type = 'remote'
        self.sitemap_file_error = message

    def error_receiving_local_file(self, message):
        self.sitemap_source_type = 'local'
        self.sitemap_file_error = message

    def get_file_status_as_dict(self):
        return {'file_name': self.sitemap_file_name,
                'file_headers': self.sitemap_headers,
                'source_type': self.sitemap_source_type,
                'file_error': self.sitemap_file_error,
                'file_received': self.sitemap_received,
                'sitemap_type': self.sitemap_type}

    def get_content(self):
        if self.sitemap_contents:
            return self.sitemap_contents.strip()
        else:
            return None

    def has_lxml(self):
        if self.sitemap_lxml:
            return True
        else:
            return False

    def set_lxml(self, lxml):
        self.sitemap_lxml = lxml

    def set_plain_text(self, content):
        self.sitemap_plain_text = content

    def set_sitemap_type(self, sitemap_type):
        self.sitemap_type = sitemap_type
"self.sitemap_received = True self.sitemap_contents = file_contents def error_receiving_file(self, message): self.sitemap_file_error = message def",
"self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type} def get_content(self): if self.sitemap_contents:",
"file_contents): self.sitemap_source_type = 'local' self.sitemap_received = True self.sitemap_contents = file_contents def error_receiving_file(self, message):",
"self.sitemap_file_name = filename def set_remote_file_from_requests(self, result): self.sitemap_source_type = 'remote' self.sitemap_received = True self.sitemap_contents",
"sitemap_headers = None sitemap_source_type = None # remote or local sitemap_file_error = None",
"False sitemap_lxml = None sitemap_plain_text = None sitemap_type = None # one of",
"self.sitemap_file_error = message def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name, 'file_headers': self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error':",
"'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type} def get_content(self): if self.sitemap_contents: return self.sitemap_contents.strip() else:",
"= None # remote or local sitemap_file_error = None sitemap_received = False sitemap_lxml",
"message): self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error = message",
"self.sitemap_source_type = 'remote' self.sitemap_received = True self.sitemap_contents = result.text self.sitemap_headers = result.headers def",
"def error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error",
"else: return False def set_lxml(self, lxml): self.sitemap_lxml = lxml def set_plain_text(self, content): self.sitemap_plain_text",
"True self.sitemap_contents = file_contents def error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self, message):",
"error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error =",
"error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error = message def error_receiving_local_file(self, message): self.sitemap_source_type =",
"def __init__(self, filename): self.sitemap_file_name = filename def set_remote_file_from_requests(self, result): self.sitemap_source_type = 'remote' self.sitemap_received",
"= None sitemap_received = False sitemap_lxml = None sitemap_plain_text = None sitemap_type =",
"= file_contents def error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type =",
"= False sitemap_lxml = None sitemap_plain_text = None sitemap_type = None # one",
"None sitemap_type = None # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, #",
"= None # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format def",
"self.sitemap_contents = result.text self.sitemap_headers = result.headers def set_local_file(self, file_contents): self.sitemap_source_type = 'local' self.sitemap_received",
"'local' self.sitemap_received = True self.sitemap_contents = file_contents def error_receiving_file(self, message): self.sitemap_file_error = message",
"= True self.sitemap_contents = file_contents def error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self,",
"file_contents def error_receiving_file(self, message): self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote'",
"self.sitemap_type} def get_content(self): if self.sitemap_contents: return self.sitemap_contents.strip() else: return None def has_lxml(self): if",
"self.sitemap_file_error = message def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error = message def",
"plain_text, # invalid_file_format def __init__(self, filename): self.sitemap_file_name = filename def set_remote_file_from_requests(self, result): self.sitemap_source_type",
"return None def has_lxml(self): if self.sitemap_lxml: return True else: return False def set_lxml(self,",
"= filename def set_remote_file_from_requests(self, result): self.sitemap_source_type = 'remote' self.sitemap_received = True self.sitemap_contents =",
"sitemap_file_name = None sitemap_contents = None sitemap_headers = None sitemap_source_type = None #",
"= message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error = message def error_receiving_local_file(self,",
"error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error = message def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name,",
"'file_headers': self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type} def get_content(self): if",
"= None sitemap_contents = None sitemap_headers = None sitemap_source_type = None # remote",
"self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type} def get_content(self): if self.sitemap_contents: return self.sitemap_contents.strip() else: return",
"rss_feed, atom_feed, plain_text, # invalid_file_format def __init__(self, filename): self.sitemap_file_name = filename def set_remote_file_from_requests(self,",
"None sitemap_contents = None sitemap_headers = None sitemap_source_type = None # remote or",
"SitemapFile: sitemap_file_name = None sitemap_contents = None sitemap_headers = None sitemap_source_type = None",
"None sitemap_headers = None sitemap_source_type = None # remote or local sitemap_file_error =",
"None # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format def __init__(self,",
"'remote' self.sitemap_file_error = message def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error = message",
"filename def set_remote_file_from_requests(self, result): self.sitemap_source_type = 'remote' self.sitemap_received = True self.sitemap_contents = result.text",
"None sitemap_received = False sitemap_lxml = None sitemap_plain_text = None sitemap_type = None",
"= 'remote' self.sitemap_file_error = message def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error =",
"set_lxml(self, lxml): self.sitemap_lxml = lxml def set_plain_text(self, content): self.sitemap_plain_text = content def set_sitemap_type(self,",
"= None sitemap_type = None # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text,",
"self.sitemap_source_type, 'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type} def get_content(self): if self.sitemap_contents: return self.sitemap_contents.strip()",
"def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name, 'file_headers': self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received,",
"xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format def __init__(self, filename): self.sitemap_file_name = filename",
"= message def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name, 'file_headers': self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error': self.sitemap_file_error,",
"True else: return False def set_lxml(self, lxml): self.sitemap_lxml = lxml def set_plain_text(self, content):",
"message def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error = message def get_file_status_as_dict(self): return",
"return {'file_name': self.sitemap_file_name, 'file_headers': self.sitemap_headers, 'source_type': self.sitemap_source_type, 'file_error': self.sitemap_file_error, 'file_received': self.sitemap_received, 'sitemap_type': self.sitemap_type}",
"<gh_stars>1-10 class SitemapFile: sitemap_file_name = None sitemap_contents = None sitemap_headers = None sitemap_source_type",
"# one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format def __init__(self, filename):",
"sitemap_contents = None sitemap_headers = None sitemap_source_type = None # remote or local",
"= 'remote' self.sitemap_received = True self.sitemap_contents = result.text self.sitemap_headers = result.headers def set_local_file(self,",
"message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error = message def error_receiving_local_file(self, message):",
"message): self.sitemap_source_type = 'local' self.sitemap_file_error = message def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name, 'file_headers':",
"self.sitemap_source_type = 'remote' self.sitemap_file_error = message def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error",
"self.sitemap_contents: return self.sitemap_contents.strip() else: return None def has_lxml(self): if self.sitemap_lxml: return True else:",
"None def has_lxml(self): if self.sitemap_lxml: return True else: return False def set_lxml(self, lxml):",
"self.sitemap_file_error = message def error_receiving_remote_file(self, message): self.sitemap_source_type = 'remote' self.sitemap_file_error = message def",
"def error_receiving_local_file(self, message): self.sitemap_source_type = 'local' self.sitemap_file_error = message def get_file_status_as_dict(self): return {'file_name':",
"# invalid_file_format def __init__(self, filename): self.sitemap_file_name = filename def set_remote_file_from_requests(self, result): self.sitemap_source_type =",
"of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format def __init__(self, filename): self.sitemap_file_name =",
"else: return None def has_lxml(self): if self.sitemap_lxml: return True else: return False def",
"True self.sitemap_contents = result.text self.sitemap_headers = result.headers def set_local_file(self, file_contents): self.sitemap_source_type = 'local'",
"sitemap_type = None # one of xml_sitemap, xml_sitemap_index, rss_feed, atom_feed, plain_text, # invalid_file_format",
"= 'local' self.sitemap_file_error = message def get_file_status_as_dict(self): return {'file_name': self.sitemap_file_name, 'file_headers': self.sitemap_headers, 'source_type':",
"= lxml def set_plain_text(self, content): self.sitemap_plain_text = content def set_sitemap_type(self, sitemap_type): self.sitemap_type ="
] |
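
# Usage sketch (an assumption, not part of the original module): fetch a
# remote sitemap with requests and record the outcome on a SitemapFile.
# The URL below is a placeholder.
import requests

sitemap = SitemapFile('https://example.com/sitemap.xml')
try:
    response = requests.get(sitemap.sitemap_file_name, timeout=10)
    response.raise_for_status()
    sitemap.set_remote_file_from_requests(response)
except requests.RequestException as exc:
    sitemap.error_receiving_remote_file(str(exc))
print(sitemap.get_file_status_as_dict())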
# User provided config file
import config
import requests
import attrdict
import logging


def attrdict_or_list(thing):
    if type(thing) == dict:
        return attrdict.AttrMap(thing)
    elif type(thing) == list:
        return thing
    else:
        assert False, "DON'T PANIC. Something that wasn't a list or dict."


class NotInHabiticaObject(Exception):
    pass


class HabiticaObject(object):
    """Abstract class for custom HTTP requests commands for Habitica."""

    def __init__(self, uuid, apikey, json=None, endpoint=None):
        # json must be created with __dict__ to avoid referencing
        # itself in __setattr__
        # self.__dict__["json"] = attrdict.AttrMap()
        self.__dict__["_uuid"] = uuid
        self.__dict__["_apikey"] = apikey
        self.__dict__["_habitica_api"] = config.HABITICA_URL + "/api/v2"

        if json:
            self.__dict__["json"] = attrdict.AttrMap(json)
        elif endpoint:
            self.__dict__["json"] = self._get_or_except(endpoint)
        else:
            self.__dict__["json"] = attrdict.AttrMap()

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, d):
        # Use the ordinary, plain, boring, normal setattr so that
        # pickle doesn't freak out.
        super(HabiticaObject, self).__setattr__("__dict__", d)

    def _put_or_except(self, endpoint, json=None):
        """Return json from PUT request or raise an exception."""
        if json:
            r = requests.put(
                self._habitica_api + endpoint,
                headers={'x-api-user': self._uuid, 'x-api-key': self._apikey},
                json=dict(json),
            )
        else:
            r = requests.put(
                self._habitica_api + endpoint,
                headers={'x-api-user': self._uuid, 'x-api-key': self._apikey},
            )
        try:
            r.raise_for_status()
        except Exception as e:
            print(r)
            raise e
        return attrdict_or_list(r.json())

    def _get_or_except(self, endpoint):
        """Return json from GET request or raise an exception."""
        r = requests.get(
            self._habitica_api + endpoint,
            headers={'x-api-user': self._uuid, 'x-api-key': self._apikey},
        )
        r.raise_for_status()
        return attrdict_or_list(r.json())

    def _post_or_except(self, endpoint, json={}, query={}):
        """Return json from POST request or raise an exception."""
        r = requests.post(
            self._habitica_api + endpoint,
            headers={'x-api-user': self._uuid, 'x-api-key': self._apikey},
            json=dict(json),
            params=query,
        )
        r.raise_for_status()
        return attrdict_or_list(r.json())

    def _delete_or_except(self, endpoint):
        """Return json from DELETE request or raise an exception."""
        r = requests.delete(
            self._habitica_api + endpoint,
            headers={'x-api-user': self._uuid, 'x-api-key': self._apikey},
        )
        r.raise_for_status()
        return attrdict_or_list(r.json())

    def __str__(self):
        return "HabiticaObject: \n" + str(self.__dict__)
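
# Sketch of a concrete subclass (an assumption; the original file defines
# only the abstract base). '/user' follows the v2-style endpoints the base
# class targets; the credentials below are placeholders.
class User(HabiticaObject):
    def __init__(self, uuid, apikey):
        # Fetch the user's JSON blob from the API at construction time.
        super(User, self).__init__(uuid, apikey, endpoint="/user")

    def refresh(self):
        # Re-fetch and replace the cached JSON, writing through __dict__
        # the same way the base class does.
        self.__dict__["json"] = self._get_or_except("/user")

# user = User("your-uuid", "your-api-key")
# print(user.json)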
# Exercise 33 - Local Variables
c = 1

def foo():
    c = 2  # local to foo(); shadows the module-level c
    return c

c = 3
print(foo())  # prints 2: foo() returns its own local c, not the global
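
# Companion sketch (not part of the original exercise): the `global`
# statement makes the function rebind the module-level name instead of
# creating a local.
c = 1

def bar():
    global c
    c = 2  # rebinds the module-level c
    return c

c = 3
print(bar())  # prints 2
print(c)      # also prints 2: bar() overwrote the global binding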
#!/usr/bin/env python3
"""part_finder.py

Look through two files and search for an internal part number that looks
like `match_pattern`.
"""
# first arg is a digikey csv cart
# second is a newline-delimited list of eoi partnumbers
import regex
import click

match_pattern = r"\w{3}-\w{4}-\w{2}"


@click.command()
@click.option("--first", "-f", type=str, required=True,
              help="Design BOM to compare to. Should have the part number "
                   "somewhere in the line")
@click.option("--second", "-s", type=str, required=True,
              help="Main BOM to search. Typically the distributor BOM or a "
                   "text schematic")
def main(first, second):
    regx = regex.compile(match_pattern)
    with open(first, 'r') as f:
        first_parts = [part.strip() for part in f.read().strip().split('\n')]
    with open(second, 'r') as f:
        st = f.read().strip()
    second_parts = regx.findall(st)

    nfirst = []   # part numbers found in second but missing from first
    nsecond = []  # part numbers found in first but missing from second
    for part in second_parts:
        if part not in first_parts and part not in nfirst:
            nfirst.append(part)
    for part in first_parts:
        if part not in second_parts and part not in nsecond:
            nsecond.append(part)
    print("Not in first: ", nfirst)
    print("Not in second: ", nsecond)


if __name__ == "__main__":
    main()
"regx.findall(st) nfirst = [] nsecond = [] for part in first_parts: if part",
"main(first, second): regx = regex.compile(match_pattern) with open(first, 'r') as f: first_parts = [part.strip()",
"#first arg is a digikey csv cart #second is a newline deliminated list",
"\"\"\" part_finder.py Look through two files and search for an internal part number",
"#!/usr/bin/env python3 import sys, regex, click #first arg is a digikey csv cart",
"part in first_parts: if part not in nfirst: nfirst.append(part) for part in second_parts:",
"with open(second, 'r') as f: st = f.read().strip() second_parts = regx.findall(st) nfirst ="
] |
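The part_finder.py script above has a few defects as written: @click.argument is given option-style flags and a help string (Click only accepts help= on @click.option), @click.command is applied without parentheses, and the second loop tests membership in an undefined name `parts`. Below is a minimal corrected sketch of the same comparison, not the original author's code: it assumes the standard `re` module is an acceptable stand-in for the third-party `regex` package, and the set-difference layout is my own.

#!/usr/bin/env python3
# Hypothetical cleanup of part_finder.py: report part numbers present in
# one file but missing from the other.
import re
import click

MATCH_PATTERN = r"\w{3}-\w{4}-\w{2}"  # e.g. ABC-1234-01

@click.command()
@click.option("--first", "-f", type=str, required=True,
              help="Design BOM to compare to. Should have the part number somewhere in the line")
@click.option("--second", "-s", type=str, required=True,
              help="Main BOM to search. Typically the distributer BOM or a text schematic")
def main(first, second):
    pattern = re.compile(MATCH_PATTERN)
    # Collect every part-number-looking token from each file.
    with open(first) as f:
        first_parts = {m for line in f for m in pattern.findall(line)}
    with open(second) as f:
        second_parts = set(pattern.findall(f.read()))
    print("Not in first: ", sorted(second_parts - first_parts))
    print("Not in second: ", sorted(first_parts - second_parts))

if __name__ == "__main__":
    main()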
[
"driver.close() def read_ini_settings(): setting_temp = [] setting = {} num_of_value = 1 seperate_symbol",
"(item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list # Convert",
"item in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with",
"15 FINAL_VIEWING_TIME = 60*60 # 1 hour def main(): # Initilize settings setting",
"== True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type for",
"\"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT",
"y in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true':",
"15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...') else: print('Sign in successfully!') def",
"= '=' # Open and read setting.ini with open('data/setting.ini') as f: for item",
"OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60",
"list to dictionary for x in range(len(setting_temp)): for y in range(num_of_value): key =",
"sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID",
"{} num_of_value = 1 seperate_symbol = '=' # Open and read setting.ini with",
"with open('data/setting.ini') as f: for item in f: if not (item.startswith('#') or item.startswith('\\n')):",
"# Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) #",
"Will result with a 2D list # Convert setting list to dictionary for",
"USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 >",
"settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec'])",
"word_count, extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text += target_word + '\\n'",
"'=' # Open and read setting.ini with open('data/setting.ini') as f: for item in",
"# Open and read setting.ini with open('data/setting.ini') as f: for item in f:",
"# Sign in if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") #",
"setting_temp = [] setting = {} num_of_value = 1 seperate_symbol = '=' #",
"int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in",
"WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except:",
"and read setting.ini with open('data/setting.ini') as f: for item in f: if not",
"LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL",
"print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME)",
"setting['email'] password = setting['password'] # Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver,",
"= \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\"",
"import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time # Init working",
"selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time # Init",
"read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop'])",
"= setting['password'] # Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email)",
"driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text",
"import expected_conditions as EC import time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__))",
"LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 #",
"browser for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp",
"words from test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text)",
"'true': value = True setting[key] = value return setting def sign_in(driver, setting): email",
"= True setting[key] = value return setting def sign_in(driver, setting): email = setting['email']",
"value.lower() == 'true': value = True setting[key] = value return setting def sign_in(driver,",
"= 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1 hour def main():",
"TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word !=",
"SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE =",
"= \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID =",
"word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver",
"# extract words from test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as",
"read setting.ini with open('data/setting.ini') as f: for item in f: if not (item.startswith('#')",
"WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if",
"if target_word != '': type_word(target_word, driver) if extract_text_to_file == True: word_count += 1",
"extracted_text += target_word + '\\n' else: extracted_text += target_word+' ' return extracted_text main()",
"# Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec",
"as EC import time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir =",
"= \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE",
"# SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE",
"with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\")",
"Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting =",
"text and type for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR,",
"read_ini_settings(): setting_temp = [] setting = {} num_of_value = 1 seperate_symbol = '='",
"email = setting['email'] password = setting['password'] # Fill text fields with account info",
"extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if",
"import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import",
"in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true': value",
"value return setting def sign_in(driver, setting): email = setting['email'] password = setting['password'] #",
"results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = []",
"from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from",
"0: extracted_text += target_word + '\\n' else: extracted_text += target_word+' ' return extracted_text",
"!= '': type_word(target_word, driver) if extract_text_to_file == True: word_count += 1 extracted_text =",
"for item in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result",
"# Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS",
"True setting[key] = value return setting def sign_in(driver, setting): email = setting['email'] password",
"for y in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() ==",
"open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") #",
"setting['password'] # Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password)",
"extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from",
"f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D",
"sign in! Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def",
"item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list # Convert setting list",
"TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY",
"= 15 FINAL_VIEWING_TIME = 60*60 # 1 hour def main(): # Initilize settings",
"setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop",
"URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\"",
"print(\"\\nYAY! It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '':",
"setting): email = setting['email'] password = setting['password'] # Fill text fields with account",
"expected_conditions as EC import time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir",
"test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE!",
"not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list #",
"FINAL_VIEWING_TIME = 60*60 # 1 hour def main(): # Initilize settings setting =",
"= \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\"",
"= driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if extract_text_to_file == True: word_count",
"Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL",
"60*60 # 1 hour def main(): # Initilize settings setting = read_ini_settings() typing_test_url",
"if word_count % WORDS_PER_LINE == 0: extracted_text += target_word + '\\n' else: extracted_text",
"[] setting = {} num_of_value = 1 seperate_symbol = '=' # Open and",
"x in range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip()",
"\"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" #",
"extract words from test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f:",
"import time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir)",
"WORDS_PER_LINE == 0: extracted_text += target_word + '\\n' else: extracted_text += target_word+' '",
"DONE! Check your browser for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close()",
"open('data/setting.ini') as f: for item in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol))",
"= \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15",
"time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) #",
"in! Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word,",
"anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text):",
"os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\"",
"target_word != '': type_word(target_word, driver) if extract_text_to_file == True: word_count += 1 extracted_text",
"extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for",
"in if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text",
"webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\")",
"Convert setting list to dictionary for x in range(len(setting_temp)): for y in range(num_of_value):",
"RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1 hour def main(): # Initilize",
"Get text and type for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver,",
"while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if extract_text_to_file",
"target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if extract_text_to_file == True:",
"in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE)",
"type_word(target_word, driver) if extract_text_to_file == True: word_count += 1 extracted_text = accumulate_words(target_word, word_count,",
"2D list # Convert setting list to dictionary for x in range(len(setting_temp)): for",
"accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text += target_word +",
"fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in",
"if extract_text_to_file == True: word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec)",
"selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support",
"Check your browser for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def",
"range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while",
"Sign in if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get",
"def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text += target_word",
"SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1",
"your browser for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings():",
"# ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR",
"\"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME",
"12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1 hour def main(): #",
"hour def main(): # Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account",
"if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and",
"print('Unable to sign in! Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver):",
"extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True:",
"text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") # Retain browser for viewing",
"WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...') else: print('Sign in successfully!')",
"= [] setting = {} num_of_value = 1 seperate_symbol = '=' # Open",
"currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" #",
"range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower()",
"text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging",
"typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver)",
"time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test to file if extract_text_to_file:",
"as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") # Retain browser",
"break # extract words from test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w')",
"working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL =",
"It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word,",
"or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list # Convert setting",
"== 0: extracted_text += target_word + '\\n' else: extracted_text += target_word+' ' return",
"selenium.webdriver.support import expected_conditions as EC import time # Init working folder currentdir =",
"= '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True: sign_in(driver,",
"# Get text and type for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url)",
"# 1 hour def main(): # Initilize settings setting = read_ini_settings() typing_test_url =",
"account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try:",
"successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE",
"driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign",
"password = setting['password'] # Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID)))",
"<reponame>gordonnguyen/10fastfingers-auto-type import os,sys,inspect from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui",
"= setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text =",
"print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count",
"type for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY!",
"word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True:",
"dictionary for x in range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip() value",
"use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text",
"to dictionary for x in range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip()",
"def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE ==",
"in range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if",
"return setting def sign_in(driver, setting): email = setting['email'] password = setting['password'] # Fill",
"= accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test",
"if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser",
"except: print('Unable to sign in! Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word,",
"\"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\"",
"from test to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL",
"from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time #",
"logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...')",
"setting = {} num_of_value = 1 seperate_symbol = '=' # Open and read",
"print(LINE+\"\\nPlaying anonymously...\") # Get text and type for i in range(num_test_loop): word_count =",
"# Will result with a 2D list # Convert setting list to dictionary",
"seperate_symbol = '=' # Open and read setting.ini with open('data/setting.ini') as f: for",
"= \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE =",
"setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true': value = True setting[key] =",
"def read_ini_settings(): setting_temp = [] setting = {} num_of_value = 1 seperate_symbol =",
"= int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign",
"and type for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR)))",
"= float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver =",
"main(): # Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account']",
"Fill text fields with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try",
"float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe')",
"driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text",
"setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list # Convert setting list to",
"Open and read setting.ini with open('data/setting.ini') as f: for item in f: if",
"result with a 2D list # Convert setting list to dictionary for x",
"'' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True: sign_in(driver, setting)",
"ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR =",
"Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing",
"else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if",
"import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import",
"key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true': value = True",
"'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") # Retain",
"+= 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract",
"num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') #",
"from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as",
"True: word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break",
"use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type",
"> span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" #",
"SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID",
"to sign in! Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY)",
"folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\"",
"setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file'] extracted_text = ''",
"SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME =",
"True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type for i",
"TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12",
"type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE == 0:",
"text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your browser for results!\") # Retain browser for",
"# Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in!",
"= read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop =",
"By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time",
"# URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID =",
"# Convert setting list to dictionary for x in range(len(setting_temp)): for y in",
"if value.lower() == 'true': value = True setting[key] = value return setting def",
"= 1 seperate_symbol = '=' # Open and read setting.ini with open('data/setting.ini') as",
"== True: word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME)",
"os,sys,inspect from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait",
"setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type for i in range(num_test_loop):",
"num_of_value = 1 seperate_symbol = '=' # Open and read setting.ini with open('data/setting.ini')",
"webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions",
"\"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID",
"to file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check",
"15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable",
"# Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting",
"viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting = {} num_of_value =",
"sign_in(driver, setting): email = setting['email'] password = setting['password'] # Fill text fields with",
"extract_text_to_file == True: word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else:",
"f: for item in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will",
"== 'true': value = True setting[key] = value return setting def sign_in(driver, setting):",
"= '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1",
"in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a",
"WebDriverWait from selenium.webdriver.support import expected_conditions as EC import time # Init working folder",
"span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS",
"1 seperate_symbol = '=' # Open and read setting.ini with open('data/setting.ini') as f:",
"word_count % WORDS_PER_LINE == 0: extracted_text += target_word + '\\n' else: extracted_text +=",
"info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver,",
"= setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account",
"driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL))",
"INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME",
"parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID",
"word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test to file",
"Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec =",
"WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1 hour def",
"True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if extract_text_to_file ==",
"setting[key] = value return setting def sign_in(driver, setting): email = setting['email'] password =",
"value = setting_temp[x][y+1].strip() if value.lower() == 'true': value = True setting[key] = value",
"import os,sys,inspect from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import",
"1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words",
"from selenium.webdriver.support import expected_conditions as EC import time # Init working folder currentdir",
"setting_temp[x][y+1].strip() if value.lower() == 'true': value = True setting[key] = value return setting",
"with a 2D list # Convert setting list to dictionary for x in",
"typing_test_url = setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file",
"with account info driver.get(SIGNIN_URL) WebDriverWait(driver, 15).until(EC.element_to_be_clickable((By.ID,USERNAME_ID))) driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click()",
"extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test to file if",
"driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text +=",
"for results!\") # Retain browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp =",
"setting list to dictionary for x in range(len(setting_temp)): for y in range(num_of_value): key",
"= 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word",
"% WORDS_PER_LINE == 0: extracted_text += target_word + '\\n' else: extracted_text += target_word+'",
"= setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file =",
"PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\" TARGET_WORD_SLTCR = \"#row1 > span.highlight\" INPUT_FIELD_ID =",
"in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count, extracted_text): if word_count %",
"\"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID = \"UserEmail\" PASSWORD_ID = \"<PASSWORD>\" LOGIN_BTN_ID = \"login-form-submit\"",
"accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test to",
"driver.find_element_by_id(USERNAME_ID).send_keys(email) driver.find_element_by_id(PASSWORD_ID).send_keys(password) # Try logging in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to",
"in driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...') else:",
"time.sleep(RESULT_VIEWING_TIME) break # extract words from test to file if extract_text_to_file: with open('data/competition_text_file.txt',",
"file if extract_text_to_file: with open('data/competition_text_file.txt', 'w') as text_f: text_f.write(extracted_text) print('\\n'+LINE+\"\\nALL DONE! Check your",
"try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...') else: print('Sign in",
"extracted_text): if word_count % WORDS_PER_LINE == 0: extracted_text += target_word + '\\n' else:",
"= value return setting def sign_in(driver, setting): email = setting['email'] password = setting['password']",
"os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS",
"'': type_word(target_word, driver) if extract_text_to_file == True: word_count += 1 extracted_text = accumulate_words(target_word,",
"def sign_in(driver, setting): email = setting['email'] password = setting['password'] # Fill text fields",
"def main(): # Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url'] use_account =",
"driver) if extract_text_to_file == True: word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text)",
"for i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's",
"EC import time # Init working folder currentdir = os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir)",
"for x in range(len(setting_temp)): for y in range(num_of_value): key = setting_temp[x][y].strip() value =",
"as f: for item in f: if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) #",
"\"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if",
"= {} num_of_value = 1 seperate_symbol = '=' # Open and read setting.ini",
"driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True: sign_in(driver, setting) else:",
"a 2D list # Convert setting list to dictionary for x in range(len(setting_temp)):",
"\"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE = '<>'*20",
"driver.find_element_by_id(LOGIN_BTN_ID).click() try: WebDriverWait(driver, 15).until(EC.url_changes(SIGNIN_URL)) except: print('Unable to sign in! Playing anonymously...') else: print('Sign",
"Playing anonymously...') else: print('Sign in successfully!') def type_word(target_word, driver): driver.find_element_by_id(INPUT_FIELD_ID).send_keys(target_word+SPACE_KEY) def accumulate_words(target_word, word_count,",
"= os.path.dirname(os.path.realpath(__file__)) parentdir = os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS",
"else: time.sleep(RESULT_VIEWING_TIME) break # extract words from test to file if extract_text_to_file: with",
"browser for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting = {}",
"= setting['email'] password = setting['password'] # Fill text fields with account info driver.get(SIGNIN_URL)",
"= 60*60 # 1 hour def main(): # Initilize settings setting = read_ini_settings()",
"time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting = {} num_of_value = 1",
"1 hour def main(): # Initilize settings setting = read_ini_settings() typing_test_url = setting['typing_test_url']",
"selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC",
"30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word = driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word",
"setting['typing_test_url'] use_account = setting['use_account'] word_per_sec = float(setting['word_per_sec']) num_test_loop = int(setting['num_test_loop']) extract_text_to_file = setting['extract_text_to_file']",
"setting['extract_text_to_file'] extracted_text = '' driver = webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account ==",
"for viewing time.sleep(FINAL_VIEWING_TIME) driver.close() def read_ini_settings(): setting_temp = [] setting = {} num_of_value",
"= os.path.dirname(currentdir) sys.path.append(parentdir) # URLS SIGNIN_URL = \"https://10fastfingers.com/login\" # ELEMENTS SELECTORS USERNAME_ID =",
"range(num_of_value): key = setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true': value =",
"anonymously...\") # Get text and type for i in range(num_test_loop): word_count = 0",
"setting.ini with open('data/setting.ini') as f: for item in f: if not (item.startswith('#') or",
"= setting_temp[x][y+1].strip() if value.lower() == 'true': value = True setting[key] = value return",
"# OTHERS LINE = '<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME =",
"setting def sign_in(driver, setting): email = setting['email'] password = setting['password'] # Fill text",
"= \"#row1 > span.highlight\" INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY =",
"driver.find_element_by_css_selector(TARGET_WORD_SLTCR).text if target_word != '': type_word(target_word, driver) if extract_text_to_file == True: word_count +=",
"else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type for i in range(num_test_loop): word_count",
"sign_in(driver, setting) else: print(LINE+\"\\nPlaying anonymously...\") # Get text and type for i in",
"if not (item.startswith('#') or item.startswith('\\n')): setting_temp.append(item.split(seperate_symbol)) # Will result with a 2D list",
"INPUT_FIELD_ID = \"inputfield\" # SPECIAL TYPE INPUT SPACE_KEY = \"\\ue00d\" # OTHERS LINE",
"0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing... \"+LINE) while True: target_word =",
"i in range(num_test_loop): word_count = 0 driver.get(typing_test_url) WebDriverWait(driver, 30).until(EC.presence_of_element_located((By.CSS_SELECTOR, TARGET_WORD_SLTCR))) print(\"\\nYAY! It's typing...",
"= webdriver.Chrome('bin/chromedriver.exe') # Sign in if use_account == True: sign_in(driver, setting) else: print(LINE+\"\\nPlaying",
"'<>'*20 WORDS_PER_LINE = 12 RESULT_VIEWING_TIME = 15 FINAL_VIEWING_TIME = 60*60 # 1 hour",
"= setting_temp[x][y].strip() value = setting_temp[x][y+1].strip() if value.lower() == 'true': value = True setting[key]",
"value = True setting[key] = value return setting def sign_in(driver, setting): email =",
"list # Convert setting list to dictionary for x in range(len(setting_temp)): for y",
"word_count += 1 extracted_text = accumulate_words(target_word, word_count, extracted_text) time.sleep(word_per_sec) else: time.sleep(RESULT_VIEWING_TIME) break #"
] |
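The bot above targets the Selenium 3 API: the driver.find_element_by_id / find_element_by_css_selector helpers and the positional chromedriver path in webdriver.Chrome('bin/chromedriver.exe') are deprecated and have been removed in current Selenium 4 releases. A minimal sketch of the equivalent element lookup with the Selenium 4 locator API, assuming the same space keycode as the script; the helper name type_word_v4 is my own:

from selenium import webdriver
from selenium.webdriver.common.by import By

SPACE_KEY = "\ue00d"  # same special space keycode used by the script above

def type_word_v4(driver: webdriver.Chrome, input_field_id: str, word: str) -> None:
    # Selenium 4 style: one find_element() call with a By locator replaces
    # the removed find_element_by_id() helper.
    driver.find_element(By.ID, input_field_id).send_keys(word + SPACE_KEY)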
[
"in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing",
"for line in lines)): if match is not None: xmp.values[match.group(1)] = match.group(2) else:",
"that required fields are present for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"):",
"Iterator[str]) -> Xmp: xmp = Xmp() # First line is always a comment,",
"Match each line and enumerate (line numbers are needed for errors) for n,",
"re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp = Xmp() # First",
"-> Xmp: xmp = Xmp() # First line is always a comment, skip",
"typing import Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE =",
"Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def",
"from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename:",
"line in lines)): if match is not None: xmp.values[match.group(1)] = match.group(2) else: raise",
"# First line is always a comment, skip it next(lines) # Match each",
"# Match each line and enumerate (line numbers are needed for errors) for",
"line\", filename, n) # Verify that required fields are present for field in",
"Xmp: xmp = Xmp() # First line is always a comment, skip it",
"fields are present for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field",
"\"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing required field ‘{field}’\",",
"comment, skip it next(lines) # Match each line and enumerate (line numbers are",
"xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str,",
"in lines)): if match is not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable",
"= match.group(2) else: raise ParseError(\"unable to parse line\", filename, n) # Verify that",
"enumerate (line numbers are needed for errors) for n, match in enumerate((FIELD_RE.match(line) for",
"raise ParseError(\"unable to parse line\", filename, n) # Verify that required fields are",
"is not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse line\", filename,",
"import Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\")",
"lines: Iterator[str]) -> Xmp: xmp = Xmp() # First line is always a",
"are present for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not",
"enumerate((FIELD_RE.match(line) for line in lines)): if match is not None: xmp.values[match.group(1)] = match.group(2)",
"xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp:",
"skip it next(lines) # Match each line and enumerate (line numbers are needed",
"to parse line\", filename, n) # Verify that required fields are present for",
"\"Package\", \"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing required field ‘{field}’\", filename)",
"each line and enumerate (line numbers are needed for errors) for n, match",
"# Verify that required fields are present for field in (\"MHS File\", \"Device\",",
"filename, n) # Verify that required fields are present for field in (\"MHS",
"xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse line\", filename, n) # Verify",
"always a comment, skip it next(lines) # Match each line and enumerate (line",
"for errors) for n, match in enumerate((FIELD_RE.match(line) for line in lines)): if match",
"str, lines: Iterator[str]) -> Xmp: xmp = Xmp() # First line is always",
"xmp = Xmp() # First line is always a comment, skip it next(lines)",
"next(lines) # Match each line and enumerate (line numbers are needed for errors)",
"required fields are present for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if",
"None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse line\", filename, n) #",
"(line numbers are needed for errors) for n, match in enumerate((FIELD_RE.match(line) for line",
"match is not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse line\",",
"and enumerate (line numbers are needed for errors) for n, match in enumerate((FIELD_RE.match(line)",
"it next(lines) # Match each line and enumerate (line numbers are needed for",
"ParseError(\"unable to parse line\", filename, n) # Verify that required fields are present",
"for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values:",
"Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp =",
"def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp = Xmp() # First line",
"line is always a comment, skip it next(lines) # Match each line and",
"errors) for n, match in enumerate((FIELD_RE.match(line) for line in lines)): if match is",
"if field not in xmp.values: raise ParseError(f\"missing required field ‘{field}’\", filename) return xmp",
"lines)): if match is not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to",
"First line is always a comment, skip it next(lines) # Match each line",
"= re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp = Xmp() #",
"parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp = Xmp() # First line is",
"n, match in enumerate((FIELD_RE.match(line) for line in lines)): if match is not None:",
"parse line\", filename, n) # Verify that required fields are present for field",
"line and enumerate (line numbers are needed for errors) for n, match in",
"if match is not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse",
"Xmp() # First line is always a comment, skip it next(lines) # Match",
"import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp",
"numbers are needed for errors) for n, match in enumerate((FIELD_RE.match(line) for line in",
"not None: xmp.values[match.group(1)] = match.group(2) else: raise ParseError(\"unable to parse line\", filename, n)",
"from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) ->",
"(\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing required",
"Verify that required fields are present for field in (\"MHS File\", \"Device\", \"Package\",",
"else: raise ParseError(\"unable to parse line\", filename, n) # Verify that required fields",
"for n, match in enumerate((FIELD_RE.match(line) for line in lines)): if match is not",
"needed for errors) for n, match in enumerate((FIELD_RE.match(line) for line in lines)): if",
"a comment, skip it next(lines) # Match each line and enumerate (line numbers",
"is always a comment, skip it next(lines) # Match each line and enumerate",
"n) # Verify that required fields are present for field in (\"MHS File\",",
"import ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines:",
"match in enumerate((FIELD_RE.match(line) for line in lines)): if match is not None: xmp.values[match.group(1)]",
"present for field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in",
"match.group(2) else: raise ParseError(\"unable to parse line\", filename, n) # Verify that required",
"from typing import Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp FIELD_RE",
"ParseError from xps_convert.read.xmp import Xmp FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str])",
"\"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing required field ‘{field}’\", filename) return",
"= Xmp() # First line is always a comment, skip it next(lines) #",
"File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values: raise ParseError(f\"missing required field",
"field in (\"MHS File\", \"Device\", \"Package\", \"SpeedGrade\"): if field not in xmp.values: raise",
"are needed for errors) for n, match in enumerate((FIELD_RE.match(line) for line in lines)):",
"import re from typing import Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import",
"in enumerate((FIELD_RE.match(line) for line in lines)): if match is not None: xmp.values[match.group(1)] =",
"re from typing import Iterator from xps_convert.read.errors import ParseError from xps_convert.read.xmp import Xmp",
"FIELD_RE = re.compile(r\"([\\w\\s]+):\\s(.*)\") def parse_xmp(filename: str, lines: Iterator[str]) -> Xmp: xmp = Xmp()"
"GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if len(shape) > 1: mat /=",
"as np ''' Initializer params(shape): Create initalized tensor @param shape tuple of dimensions",
"tensor @param shape tuple of dimensions of the expected tensor @return initialized tensor",
"numpy as np ''' Initializer params(shape): Create initalized tensor @param shape tuple of",
"class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat",
"tensor ''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self,",
"def params(self, shape): mat = np.random.standard_normal(shape) if len(shape) > 1: mat /= np.sqrt(shape[1])",
"@param shape tuple of dimensions of the expected tensor @return initialized tensor '''",
"def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape)",
"params(self, shape): mat = np.random.standard_normal(shape) if len(shape) > 1: mat /= np.sqrt(shape[1]) return",
"Initializer params(shape): Create initalized tensor @param shape tuple of dimensions of the expected",
"<reponame>obs145628/py-neural-nets import numpy as np ''' Initializer params(shape): Create initalized tensor @param shape",
"@return initialized tensor ''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer:",
"of the expected tensor @return initialized tensor ''' class GaussInitializer: def params(self, shape):",
"params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if",
"expected tensor @return initialized tensor ''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape)",
"initialized tensor ''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def",
"import numpy as np ''' Initializer params(shape): Create initalized tensor @param shape tuple",
"initalized tensor @param shape tuple of dimensions of the expected tensor @return initialized",
"''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape):",
"the expected tensor @return initialized tensor ''' class GaussInitializer: def params(self, shape): return",
"dimensions of the expected tensor @return initialized tensor ''' class GaussInitializer: def params(self,",
"of dimensions of the expected tensor @return initialized tensor ''' class GaussInitializer: def",
"return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if len(shape) >",
"np ''' Initializer params(shape): Create initalized tensor @param shape tuple of dimensions of",
"shape): mat = np.random.standard_normal(shape) if len(shape) > 1: mat /= np.sqrt(shape[1]) return mat",
"tuple of dimensions of the expected tensor @return initialized tensor ''' class GaussInitializer:",
"shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if len(shape)",
"params(shape): Create initalized tensor @param shape tuple of dimensions of the expected tensor",
"Create initalized tensor @param shape tuple of dimensions of the expected tensor @return",
"GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat =",
"np.random.standard_normal(shape) class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if len(shape) > 1:",
"shape tuple of dimensions of the expected tensor @return initialized tensor ''' class",
"tensor @return initialized tensor ''' class GaussInitializer: def params(self, shape): return np.random.standard_normal(shape) class",
"class GaussSqrtInitializer: def params(self, shape): mat = np.random.standard_normal(shape) if len(shape) > 1: mat",
"''' Initializer params(shape): Create initalized tensor @param shape tuple of dimensions of the"
"bool = False, fill_value: Any = None ) -> \"TensorArray\": \"\"\" See docstring",
"KIND, either express or implied. # See the License for the specific language",
"Unless required by applicable law or agreed to in writing, software # distributed",
"int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in `Extension",
"be better to derive from ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls,",
"ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying ndarray.",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if isinstance(key, (int, slice)):",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in",
"indices: Sequence[int], allow_fill: bool = False, fill_value: Any = None ) -> \"TensorArray\":",
"__arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the",
"def name(self) -> str: \"\"\"A string representation of the dtype.\"\"\" return \"TensorType\" @classmethod",
"string: str): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about",
"\"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"i in range(len(indices)): if indices[i] < 0: # Note that Numpy will broadcast",
"for a in to_concat])) def isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray`",
"return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class",
"(int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \"",
"data type for a column of tensors with the same shape. \"\"\" @property",
"the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See docstring in",
"def __len__(self) -> int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See",
"def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from",
"\"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might",
"import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a",
"return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See",
"pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"this file except in compliance with the License. # You may obtain a",
"the shape # of each row. values[i] = fill_value return TensorArray(values) @property def",
"dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO options",
"tensors with the same shape. \"\"\" @property def type(self): \"\"\"The type for a",
"A Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s, or tensors, where the",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat]))",
"values[i] = fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring",
"-> \"TensorArray\": \"\"\" See docstring in `Extension Array` class in `pandas/core/arrays/base.py` for information",
"dimension is the count of tensors in the column. Each tensor must have",
"Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values: A `numpy.ndarray` or sequence",
"@property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype",
"return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying ndarray. TODO:",
"ANY KIND, either express or implied. # See the License for the specific",
"__len__(self) -> int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring",
"docstring in `Extension Array` class in `pandas/core/arrays/base.py` for information about this method. \"\"\"",
"\"\"\" # TODO any or all values in row nan? return np.any(np.isnan(self._tensor), axis=1)",
"len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in `Extension Array` class",
"better to derive from ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls, op,",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO pandas converts",
"values = self._tensor.take(indices, axis=0) if allow_fill: # From API docs: \"[If allow_fill ==",
"numpy.ndarray, \" f\"but received {values} \" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous",
"part of its normal control flow and # will pass this method bogus",
"cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a column of",
"pass this method bogus class names. if string == cls.__name__: return cls() else:",
"= TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties too return ret def",
"\"\"\"A string representation of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str):",
"_reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"__str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray`",
"map_infer(array, is_float) to format strings etc. # Return an ndarray for scalar item,",
"item, or TensorArray for slice if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item])",
"return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) -> np.array: \"\"\" See docstring",
"in `Extension Array` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" #",
"WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See",
"method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with",
"\"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key",
"def isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"\" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor)",
"np.asarray, then applied a function e.g. map_infer(array, is_float) to format strings etc. #",
"= np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See",
"# Part of text_extensions_for_pandas # # Pandas extensions to support columns of N-dimensional",
"True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that",
"bool = True): \"\"\" :param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of",
"with np.asarray, then applied a function e.g. map_infer(array, is_float) to format strings etc.",
"Union[int, np.ndarray], value: Any) -> None: \"\"\" See docstring in `ExtensionArray` class in",
"IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or",
"names. if string == cls.__name__: return cls() else: raise TypeError( f\"Cannot construct a",
"rvalues = other._tensor if isinstance(other, TensorArray) else other res = op(lvalues, rvalues) return",
"in the column. Each tensor must have the same shape. \"\"\" def __init__(self,",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorType() def to_numpy(self, dtype=None,",
"TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in",
"is the count of tensors in the column. Each tensor must have the",
"return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin):",
"docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if",
"License. # # # tensor.py # # Part of text_extensions_for_pandas # # Pandas",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor,",
"OF ANY KIND, either express or implied. # See the License for the",
"of tensors with the same shape. \"\"\" @property def type(self): \"\"\"The type for",
"all values in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\"",
"of a TensorArray column.\"\"\" return np.ndarray @property def name(self) -> str: \"\"\"A string",
"self._tensor.take(indices, axis=0) if allow_fill: # From API docs: \"[If allow_fill == True, then]",
"rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray,",
"isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"docs: \"[If allow_fill == True, then] negative values in # `indices` indicate missing",
"Sequence): self._tensor = np.stack(values, axis=0) if len(values) > 0 else np.array([]) else: raise",
"@pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column of tensors with",
"if indices[i] < 0: # Note that Numpy will broadcast the fill value",
"from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py`",
"> 0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray,",
"Corp. # Licensed under the Apache License, Version 2.0 (the \"License\"); # you",
"information about this method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\"",
"cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in",
"NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See docstring",
"TODO options return self._tensor def __len__(self) -> int: return len(self._tensor) def __getitem__(self, item)",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO pandas converts series with",
"import * import numpy as np import pandas as pd from pandas.compat import",
"ndarray. TODO: would be better to derive from ExtensionOpsMixin, but not available \"\"\"",
"isinstance(other, TensorArray) else other res = op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op,",
"Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values: A `numpy.ndarray` or sequence of",
"method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) -> np.array: \"\"\"",
"'{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray` class in",
"to support columns of N-dimensional tensors of equal shape. # from typing import",
"Upstream code uses exceptions as part of its normal control flow and #",
"an ndarray for scalar item, or TensorArray for slice if isinstance(item, int): return",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if name",
"or TensorArray for slice if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def",
"return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: \"\"\" See",
"\"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method.",
"a in to_concat])) def isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray` class",
"for information about this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value",
"software # distributed under the License is distributed on an \"AS IS\" BASIS,",
"TODO any or all values in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self)",
"shape. \"\"\" @property def type(self): \"\"\"The type for a single row of a",
"# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to",
"return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType` class",
"flow and # will pass this method bogus class names. if string ==",
"information about this method. \"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO: Copy",
"about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) ->",
"def construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorType()",
"in `pandas/core/dtypes/base.py` for information about this method. \"\"\" # Upstream code uses exceptions",
"the fill value to the shape # of each row. values[i] = fill_value",
"self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] )",
"under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES",
"under the License. # # # tensor.py # # Part of text_extensions_for_pandas #",
"the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law",
"Part of text_extensions_for_pandas # # Pandas extensions to support columns of N-dimensional tensors",
"would be better to derive from ExtensionOpsMixin, but not available \"\"\" @classmethod def",
"row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring in",
"extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\"",
"0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \"",
"shape # of each row. values[i] = fill_value return TensorArray(values) @property def dtype(self)",
"Return an ndarray for scalar item, or TensorArray for slice if isinstance(item, int):",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for",
"`numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape. :param make_contiguous: force values to",
"\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express",
"or all values in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\":",
"each row. values[i] = fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\"",
"for information about this method. \"\"\" # TODO pandas converts series with np.asarray,",
"its normal control flow and # will pass this method bogus class names.",
"Pandas data type for a column of tensors with the same shape. \"\"\"",
"in `pandas/core/dtypes/base.py` for information about this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array):",
"required by applicable law or agreed to in writing, software # distributed under",
"Each tensor must have the same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]],",
"__init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values: A `numpy.ndarray`",
"and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) ->",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" ret = TensorArray( self._tensor.copy(), ) #",
"ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin to the class TensorArray._add_arithmetic_ops()",
"== cls.__name__: return cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\")",
"if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray],",
"applicable law or agreed to in writing, software # distributed under the License",
"text_extensions_for_pandas # # Pandas extensions to support columns of N-dimensional tensors of equal",
":param make_contiguous: force values to be contiguous in memory \"\"\" if isinstance(values, np.ndarray):",
"\"\"\" Pandas data type for a column of tensors with the same shape.",
"from pandas.core import ops # Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class",
"tensors, where the outer dimension is the count of tensors in the column.",
"\"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: # From API docs: \"[If allow_fill",
"construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information",
"def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" values = self._tensor.take(indices, axis=0)",
"self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else other res = op(lvalues, rvalues)",
"or sequence of numpy.ndarray, \" f\"but received {values} \" f\"of type '{type(values)}' instead.\")",
"na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"representation of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See",
"for information about this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion",
"column. Each tensor must have the same shape. \"\"\" def __init__(self, values: Union[np.ndarray,",
"or agreed to in writing, software # distributed under the License is distributed",
"None: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion",
"self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None:",
"return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin to the class TensorArray._add_arithmetic_ops() TensorArray._add_comparison_ops()",
"def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about",
"uses exceptions as part of its normal control flow and # will pass",
"res = op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name,",
"bogus class names. if string == cls.__name__: return cls() else: raise TypeError( f\"Cannot",
"IBM Corp. # Licensed under the Apache License, Version 2.0 (the \"License\"); #",
"CONDITIONS OF ANY KIND, either express or implied. # See the License for",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return self._tensor.__repr__() def __str__(self):",
"information about this method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else:",
"for a single row of a TensorArray column.\"\"\" return np.ndarray @property def name(self)",
"\"\"\"The type for a single row of a TensorArray column.\"\"\" return np.ndarray @property",
"else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\"",
"sequence of `numpy.ndarray`s of equal shape. :param make_contiguous: force values to be contiguous",
"governing permissions and # limitations under the License. # # # tensor.py #",
"= ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying",
"np.array: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"class names. if string == cls.__name__: return cls() else: raise TypeError( f\"Cannot construct",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO any",
"other._tensor if isinstance(other, TensorArray) else other res = op(lvalues, rvalues) return TensorArray(res) op_name",
"def take( self, indices: Sequence[int], allow_fill: bool = False, fill_value: Any = None",
"will broadcast the fill value to the shape # of each row. values[i]",
"needed def _binop(self, other): lvalues = self._tensor rvalues = other._tensor if isinstance(other, TensorArray)",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" return self._tensor.__repr__() def __str__(self): return",
"method. \"\"\" # Upstream code uses exceptions as part of its normal control",
"under the Apache License, Version 2.0 (the \"License\"); # you may not use",
"writing, software # distributed under the License is distributed on an \"AS IS\"",
"TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values)",
"in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" # Upstream",
"op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might not be needed def",
"# # # tensor.py # # Part of text_extensions_for_pandas # # Pandas extensions",
"You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #",
"other res = op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop,",
"\"\"\" See docstring in `Extension Array` class in `pandas/core/arrays/base.py` for information about this",
"License. # You may obtain a copy of the License at # #",
"in # `indices` indicate missing values. These values are set to # `fill_value`.",
"class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" return TensorArray def __from_arrow__(self,",
"def __repr__(self): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"-> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) -> np.array: \"\"\" See docstring in",
"**kwargs): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"values are set to # `fill_value`. for i in range(len(indices)): if indices[i] <",
"fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray`",
"row of a TensorArray column.\"\"\" return np.ndarray @property def name(self) -> str: \"\"\"A",
"def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class",
"if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\")",
"compliance with the License. # You may obtain a copy of the License",
"other): lvalues = self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else other res",
"this method bogus class names. if string == cls.__name__: return cls() else: raise",
"# `indices` indicate missing values. These values are set to # `fill_value`. for",
"that represents a column of `numpy.ndarray`s, or tensors, where the outer dimension is",
") # TODO: Copy cached properties too return ret def take( self, indices:",
"indices[i] < 0: # Note that Numpy will broadcast the fill value to",
"in memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor",
"same shape. \"\"\" @property def type(self): \"\"\"The type for a single row of",
"return np.ndarray @property def name(self) -> str: \"\"\"A string representation of the dtype.\"\"\"",
"this method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True,",
"this method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}'",
"of N-dimensional tensors of equal shape. # from typing import * import numpy",
"ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray`",
"overrides, but coerce_to_dtype might not be needed def _binop(self, other): lvalues = self._tensor",
"the outer dimension is the count of tensors in the column. Each tensor",
"\"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray`",
"of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable",
"\"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values:",
"exceptions as part of its normal control flow and # will pass this",
"applied a function e.g. map_infer(array, is_float) to format strings etc. # Return an",
"== \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self,",
"self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\")",
"row. values[i] = fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See",
"this method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring",
"to the shape # of each row. values[i] = fill_value return TensorArray(values) @property",
"# tensor.py # # Part of text_extensions_for_pandas # # Pandas extensions to support",
"key: Union[int, np.ndarray], value: Any) -> None: \"\"\" See docstring in `ExtensionArray` class",
"be needed def _binop(self, other): lvalues = self._tensor rvalues = other._tensor if isinstance(other,",
"docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" ret",
"construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this",
"ndarray for scalar item, or TensorArray for slice if isinstance(item, int): return self._tensor[item]",
"isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value:",
"@classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in",
"not use this file except in compliance with the License. # You may",
"import set_function_name from pandas.core import ops # Internal imports import text_extensions_for_pandas.util as util",
"# TODO: Copy cached properties too return ret def take( self, indices: Sequence[int],",
"if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat:",
"\"\"\" A Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s, or tensors, where",
"if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0)",
"Array` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO pandas",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO options return self._tensor def",
"text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin to the",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if isinstance(key,",
"method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate",
"return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class",
"True, then] negative values in # `indices` indicate missing values. These values are",
"tensor.py # # Part of text_extensions_for_pandas # # Pandas extensions to support columns",
"pandas.compat import set_function_name from pandas.core import ops # Internal imports import text_extensions_for_pandas.util as",
"License, Version 2.0 (the \"License\"); # you may not use this file except",
"-> str: \"\"\"A string representation of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls,",
"@classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information",
"\" f\"but received {values} \" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and",
"API docs: \"[If allow_fill == True, then] negative values in # `indices` indicate",
"must have the same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool",
"distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY",
"Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type",
"outer dimension is the count of tensors in the column. Each tensor must",
"columns of N-dimensional tensors of equal shape. # from typing import * import",
"= False, fill_value: Any = None ) -> \"TensorArray\": \"\"\" See docstring in",
"isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values) > 0 else np.array([]) else:",
"docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" values",
"\"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array)",
"class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying ndarray. TODO: would be",
"Numpy will broadcast the fill value to the shape # of each row.",
"of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See docstring",
"* import numpy as np import pandas as pd from pandas.compat import set_function_name",
"about this method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise",
"is_float) to format strings etc. # Return an ndarray for scalar item, or",
"-> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"of `numpy.ndarray`s of equal shape. :param make_contiguous: force values to be contiguous in",
"# you may not use this file except in compliance with the License.",
"See docstring in `Extension Array` class in `pandas/core/arrays/base.py` for information about this method.",
"column of tensors with the same shape. \"\"\" @property def type(self): \"\"\"The type",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO options return",
"agreed to in writing, software # distributed under the License is distributed on",
"import ops # Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\"",
"this method. \"\"\" # TODO any or all values in row nan? return",
"but not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides,",
"broadcast the fill value to the shape # of each row. values[i] =",
"TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\"",
"type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray` class",
"single row of a TensorArray column.\"\"\" return np.ndarray @property def name(self) -> str:",
"np import pandas as pd from pandas.compat import set_function_name from pandas.core import ops",
"def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: \"\"\" See docstring in",
"NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor)",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorType() def to_numpy(self,",
"this method. \"\"\" # Upstream code uses exceptions as part of its normal",
"type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin",
"(the \"License\"); # you may not use this file except in compliance with",
"as part of its normal control flow and # will pass this method",
"np.stack(values, axis=0) if len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray",
"about this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value else: raise",
"lvalues = self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else other res =",
"make_contiguous: bool = True): \"\"\" :param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s",
") -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return self._tensor.__repr__()",
"`indices` indicate missing values. These values are set to # `fill_value`. for i",
"this overrides, but coerce_to_dtype might not be needed def _binop(self, other): lvalues =",
"self._tensor = values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values) >",
"about this method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See",
"of equal shape. :param make_contiguous: force values to be contiguous in memory \"\"\"",
"0: # Note that Numpy will broadcast the fill value to the shape",
"np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in",
"# Unless required by applicable law or agreed to in writing, software #",
"a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return self._tensor.__repr__() def",
"Copy cached properties too return ret def take( self, indices: Sequence[int], allow_fill: bool",
"return cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def",
"e.g. map_infer(array, is_float) to format strings etc. # Return an ndarray for scalar",
"from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin to",
"by applicable law or agreed to in writing, software # distributed under the",
"\"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method.",
"value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\") def __repr__(self):",
"TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column of tensors with the same",
"# # tensor.py # # Part of text_extensions_for_pandas # # Pandas extensions to",
"in range(len(indices)): if indices[i] < 0: # Note that Numpy will broadcast the",
"name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def",
"information about this method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: # From",
"copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by",
"\"\"\" :param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape. :param",
"# TODO pandas converts series with np.asarray, then applied a function e.g. map_infer(array,",
"format strings etc. # Return an ndarray for scalar item, or TensorArray for",
"__repr__(self): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this",
"of `numpy.ndarray`s, or tensors, where the outer dimension is the count of tensors",
"Note that Numpy will broadcast the fill value to the shape # of",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO pandas converts series",
"text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to",
"information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self)",
"np.ndarray], value: Any) -> None: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"\"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None):",
"to format strings etc. # Return an ndarray for scalar item, or TensorArray",
"shape. :param make_contiguous: force values to be contiguous in memory \"\"\" if isinstance(values,",
"this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) -> np.array:",
"def type(self): \"\"\"The type for a single row of a TensorArray column.\"\"\" return",
"\"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def isna(self) -> np.array: \"\"\" See",
"method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: # From API docs: \"[If",
"values in # `indices` indicate missing values. These values are set to #",
"of numpy.ndarray, \" f\"but received {values} \" f\"of type '{type(values)}' instead.\") if not",
"shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param",
"information about this method. \"\"\" # TODO any or all values in row",
"\"TensorArray\": \"\"\" See docstring in `Extension Array` class in `pandas/core/arrays/base.py` for information about",
"file except in compliance with the License. # You may obtain a copy",
"TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring",
"for information about this method. \"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO:",
"text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column",
"def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in `Extension Array` class in",
"dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType`",
"code uses exceptions as part of its normal control flow and # will",
"else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \" f\"but",
"name(self) -> str: \"\"\"A string representation of the dtype.\"\"\" return \"TensorType\" @classmethod def",
"permissions and # limitations under the License. # # # tensor.py # #",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" ret = TensorArray(",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO any or",
"License for the specific language governing permissions and # limitations under the License.",
"limitations under the License. # # # tensor.py # # Part of text_extensions_for_pandas",
"np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring",
":param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape. :param make_contiguous:",
"implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators",
"self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\"",
"= value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\") def",
"to in writing, software # distributed under the License is distributed on an",
"axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import",
"be contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values,",
"_binop(self, other): lvalues = self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else other",
"pandas converts series with np.asarray, then applied a function e.g. map_infer(array, is_float) to",
"implied. # See the License for the specific language governing permissions and #",
"will pass this method bogus class names. if string == cls.__name__: return cls()",
"column of `numpy.ndarray`s, or tensors, where the outer dimension is the count of",
"\"License\"); # you may not use this file except in compliance with the",
"about this method. \"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO: Copy cached",
"obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless",
"\"\"\" Mixin to provide operators on underlying ndarray. TODO: would be better to",
"options return self._tensor def __len__(self) -> int: return len(self._tensor) def __getitem__(self, item) ->",
"for information about this method. \"\"\" # TODO any or all values in",
"return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents",
"f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in",
"class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s,",
"= self._tensor.take(indices, axis=0) if allow_fill: # From API docs: \"[If allow_fill == True,",
"axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO any or all",
"of tensors in the column. Each tensor must have the same shape. \"\"\"",
"about this method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: # From API",
"# will pass this method bogus class names. if string == cls.__name__: return",
"ret = TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties too return ret",
"self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class in",
"op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class",
"`pandas/core/dtypes/base.py` for information about this method. \"\"\" # Upstream code uses exceptions as",
"import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add operators from the mixin to the class",
"or implied. # See the License for the specific language governing permissions and",
"N-dimensional tensors of equal shape. # from typing import * import numpy as",
"f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod",
"docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return",
"type for a single row of a TensorArray column.\"\"\" return np.ndarray @property def",
"Apache License, Version 2.0 (the \"License\"); # you may not use this file",
"= fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in",
"else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: \"\"\"",
"raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See",
"OR CONDITIONS OF ANY KIND, either express or implied. # See the License",
"may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #",
"def _binop(self, other): lvalues = self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else",
"for information about this method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: #",
"len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence of",
"op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a column",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False,",
"http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,",
"raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return",
"in writing, software # distributed under the License is distributed on an \"AS",
"information about this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value else:",
"with key type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See docstring in",
"TODO: would be better to derive from ExtensionOpsMixin, but not available \"\"\" @classmethod",
"but coerce_to_dtype might not be needed def _binop(self, other): lvalues = self._tensor rvalues",
"= other._tensor if isinstance(other, TensorArray) else other res = op(lvalues, rvalues) return TensorArray(res)",
"a function e.g. map_infer(array, is_float) to format strings etc. # Return an ndarray",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorType() def",
"TODO pandas converts series with np.asarray, then applied a function e.g. map_infer(array, is_float)",
"have the same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool =",
"class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" # Upstream code uses",
"These values are set to # `fill_value`. for i in range(len(indices)): if indices[i]",
"# See the License for the specific language governing permissions and # limitations",
"the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR",
"contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence):",
"else: raise NotImplementedError(f\"'{name}' aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray",
"fill_value: Any = None ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class",
"`ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" return TensorArray def",
"= ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas",
"def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in",
"`ExtensionArray` that represents a column of `numpy.ndarray`s, or tensors, where the outer dimension",
"then applied a function e.g. map_infer(array, is_float) to format strings etc. # Return",
"<filename>text_extensions_for_pandas/array/tensor.py # # Copyright (c) 2020 IBM Corp. # Licensed under the Apache",
"for information about this method. \"\"\" # TODO options return self._tensor def __len__(self)",
"TensorArray for slice if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self,",
"def copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"np.ndarray @property def name(self) -> str: \"\"\"A string representation of the dtype.\"\"\" return",
"method. \"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties too",
"return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class",
"shape. # from typing import * import numpy as np import pandas as",
"TensorArray column.\"\"\" return np.ndarray @property def name(self) -> str: \"\"\"A string representation of",
"See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\"",
"properties too return ret def take( self, indices: Sequence[int], allow_fill: bool = False,",
"raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \" f\"but received {values} \"",
"values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on",
"coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might not be needed def _binop(self,",
"information about this method. \"\"\" # TODO pandas converts series with np.asarray, then",
"the Apache License, Version 2.0 (the \"License\"); # you may not use this",
"imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for",
"`numpy.ndarray`s, or tensors, where the outer dimension is the count of tensors in",
"you may not use this file except in compliance with the License. #",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill:",
"item) -> \"TensorArray\": \"\"\" See docstring in `Extension Array` class in `pandas/core/arrays/base.py` for",
"set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a",
"# TODO any or all values in row nan? return np.any(np.isnan(self._tensor), axis=1) def",
"negative values in # `indices` indicate missing values. These values are set to",
"indicate missing values. These values are set to # `fill_value`. for i in",
"TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \" f\"but received {values} \" f\"of",
"\" f\"not implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"cls.__name__: return cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod",
"of each row. values[i] = fill_value return TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype:",
"cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls):",
"Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"and # will pass this method bogus class names. if string == cls.__name__:",
"function e.g. map_infer(array, is_float) to format strings etc. # Return an ndarray for",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO options return self._tensor",
"# # Copyright (c) 2020 IBM Corp. # Licensed under the Apache License,",
"use this file except in compliance with the License. # You may obtain",
"coerce_to_dtype might not be needed def _binop(self, other): lvalues = self._tensor rvalues =",
"about this method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name,",
"received {values} \" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor",
"# of each row. values[i] = fill_value return TensorArray(values) @property def dtype(self) ->",
"count of tensors in the column. Each tensor must have the same shape.",
"method. \"\"\" # TODO options return self._tensor def __len__(self) -> int: return len(self._tensor)",
"\"\"\" # TODO options return self._tensor def __len__(self) -> int: return len(self._tensor) def",
"Mixin to provide operators on underlying ndarray. TODO: would be better to derive",
"the same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True):",
"are set to # `fill_value`. for i in range(len(indices)): if indices[i] < 0:",
"else other res = op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return",
"language governing permissions and # limitations under the License. # # # tensor.py",
"= np.stack(values, axis=0) if len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected a",
"to_concat])) def isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"self, indices: Sequence[int], allow_fill: bool = False, fill_value: Any = None ) ->",
"allow_fill == True, then] negative values in # `indices` indicate missing values. These",
"this method. \"\"\" # TODO pandas converts series with np.asarray, then applied a",
"'{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for",
"# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may",
"column.\"\"\" return np.ndarray @property def name(self) -> str: \"\"\"A string representation of the",
"Any) -> None: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"]",
"`numpy.ndarray`s of equal shape. :param make_contiguous: force values to be contiguous in memory",
"not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) # Add",
"about this method. \"\"\" # TODO options return self._tensor def __len__(self) -> int:",
"import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide",
"in to_concat])) def isna(self) -> np.array: \"\"\" See docstring in `ExtensionArray` class in",
"name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"__setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: \"\"\" See docstring in `ExtensionArray`",
"-> np.array: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return",
"def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might not be",
"Pandas extensions to support columns of N-dimensional tensors of equal shape. # from",
"this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" ret =",
"_create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might not be needed",
"2.0 (the \"License\"); # you may not use this file except in compliance",
"from pandas.compat import set_function_name from pandas.core import ops # Internal imports import text_extensions_for_pandas.util",
"NOTE: this overrides, but coerce_to_dtype might not be needed def _binop(self, other): lvalues",
"Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s, or tensors, where the outer",
"then] negative values in # `indices` indicate missing values. These values are set",
"range(len(indices)): if indices[i] < 0: # Note that Numpy will broadcast the fill",
"for the specific language governing permissions and # limitations under the License. #",
"allow_fill: bool = False, fill_value: Any = None ) -> \"TensorArray\": \"\"\" See",
"method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs):",
"# # Pandas extensions to support columns of N-dimensional tensors of equal shape.",
"tensors in the column. Each tensor must have the same shape. \"\"\" def",
"WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the",
"or sequence of `numpy.ndarray`s of equal shape. :param make_contiguous: force values to be",
"a TensorArray column.\"\"\" return np.ndarray @property def name(self) -> str: \"\"\"A string representation",
"@classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but coerce_to_dtype might not",
"from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin",
"string representation of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\"",
"return self._tensor def __len__(self) -> int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\":",
"instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls,",
"== True, then] negative values in # `indices` indicate missing values. These values",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if name ==",
"on underlying ndarray. TODO: would be better to derive from ExtensionOpsMixin, but not",
"-> int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in",
"TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying ndarray. TODO: would",
"= self._tensor rvalues = other._tensor if isinstance(other, TensorArray) else other res = op(lvalues,",
"# # Unless required by applicable law or agreed to in writing, software",
"# Note that Numpy will broadcast the fill value to the shape #",
"about this method. \"\"\" # TODO pandas converts series with np.asarray, then applied",
"See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\"",
"express or implied. # See the License for the specific language governing permissions",
"# limitations under the License. # # # tensor.py # # Part of",
"copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"might not be needed def _binop(self, other): lvalues = self._tensor rvalues = other._tensor",
"for slice if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key:",
"\"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\"",
"`ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" # Upstream code",
"# # Part of text_extensions_for_pandas # # Pandas extensions to support columns of",
"typing import * import numpy as np import pandas as pd from pandas.compat",
"to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a",
"either express or implied. # See the License for the specific language governing",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" values =",
"TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any) -> None: \"\"\" See docstring",
"op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls) class TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A",
"this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__",
"aggregate not implemented.\") def __arrow_array__(self, type=None): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray return ArrowTensorArray.from_numpy(self._tensor) #",
"underlying ndarray. TODO: would be better to derive from ExtensionOpsMixin, but not available",
"Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not",
"an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either",
"copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"for information about this method. \"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0))",
"pandas as pd from pandas.compat import set_function_name from pandas.core import ops # Internal",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO any or all values",
"sequence of numpy.ndarray, \" f\"but received {values} \" f\"of type '{type(values)}' instead.\") if",
"__getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in `Extension Array` class in `pandas/core/arrays/base.py`",
"series with np.asarray, then applied a function e.g. map_infer(array, is_float) to format strings",
"or tensors, where the outer dimension is the count of tensors in the",
"TODO: Copy cached properties too return ret def take( self, indices: Sequence[int], allow_fill:",
"pandas.core import ops # Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype):",
"method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in",
"from ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE:",
"TensorArray(values) @property def dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in",
"docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" return",
"for i in range(len(indices)): if indices[i] < 0: # Note that Numpy will",
"return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any) ->",
"# TODO options return self._tensor def __len__(self) -> int: return len(self._tensor) def __getitem__(self,",
"@classmethod def construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py`",
"= None ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"provide operators on underlying ndarray. TODO: would be better to derive from ExtensionOpsMixin,",
"self._tensor.copy(), ) # TODO: Copy cached properties too return ret def take( self,",
"the License. # You may obtain a copy of the License at #",
"str): \"\"\" See docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this",
"Copyright (c) 2020 IBM Corp. # Licensed under the Apache License, Version 2.0",
"def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"f\"not implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"any or all values in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) ->",
"TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s, or tensors,",
"# distributed under the License is distributed on an \"AS IS\" BASIS, #",
"self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self, name, skipna=True, **kwargs): \"\"\" See docstring",
"ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this",
"is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF",
"f\"but received {values} \" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous:",
"-> None: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"too return ret def take( self, indices: Sequence[int], allow_fill: bool = False, fill_value:",
"docstring in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" #",
"ret def take( self, indices: Sequence[int], allow_fill: bool = False, fill_value: Any =",
"# NOTE: this overrides, but coerce_to_dtype might not be needed def _binop(self, other):",
"# Upstream code uses exceptions as part of its normal control flow and",
"skipna=True, **kwargs): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about",
"allow_fill: # From API docs: \"[If allow_fill == True, then] negative values in",
"take( self, indices: Sequence[int], allow_fill: bool = False, fill_value: Any = None )",
"construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType`",
"= True): \"\"\" :param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal",
"def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py`",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" values = self._tensor.take(indices, axis=0) if",
"ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators",
"\"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties too return",
"import numpy as np import pandas as pd from pandas.compat import set_function_name from",
"same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\"",
"return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\" See docstring in `Extension Array`",
"TensorArray(pd.api.extensions.ExtensionArray, TensorOpsMixin): \"\"\" A Pandas `ExtensionArray` that represents a column of `numpy.ndarray`s, or",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" values = self._tensor.take(indices,",
"a numpy.ndarray or sequence of numpy.ndarray, \" f\"but received {values} \" f\"of type",
"where the outer dimension is the count of tensors in the column. Each",
"information about this method. \"\"\" # TODO options return self._tensor def __len__(self) ->",
"a column of `numpy.ndarray`s, or tensors, where the outer dimension is the count",
"and # limitations under the License. # # # tensor.py # # Part",
"with the License. # You may obtain a copy of the License at",
"# Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data",
"value to the shape # of each row. values[i] = fill_value return TensorArray(values)",
"of text_extensions_for_pandas # # Pandas extensions to support columns of N-dimensional tensors of",
"ops # Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas",
"docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" #",
"From API docs: \"[If allow_fill == True, then] negative values in # `indices`",
"missing values. These values are set to # `fill_value`. for i in range(len(indices)):",
"numpy.ndarray or sequence of numpy.ndarray, \" f\"but received {values} \" f\"of type '{type(values)}'",
"numpy as np import pandas as pd from pandas.compat import set_function_name from pandas.core",
"information about this method. \"\"\" # Upstream code uses exceptions as part of",
"@property def type(self): \"\"\"The type for a single row of a TensorArray column.\"\"\"",
"nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray`",
"# `fill_value`. for i in range(len(indices)): if indices[i] < 0: # Note that",
"to # `fill_value`. for i in range(len(indices)): if indices[i] < 0: # Note",
"law or agreed to in writing, software # distributed under the License is",
"method. \"\"\" # TODO any or all values in row nan? return np.any(np.isnan(self._tensor),",
"else: raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \" f\"but received {values}",
"the License for the specific language governing permissions and # limitations under the",
"scalar item, or TensorArray for slice if isinstance(item, int): return self._tensor[item] else: return",
"util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column of tensors",
"of equal shape. # from typing import * import numpy as np import",
"type(self): \"\"\"The type for a single row of a TensorArray column.\"\"\" return np.ndarray",
"set_function_name from pandas.core import ops # Internal imports import text_extensions_for_pandas.util as util @pd.api.extensions.register_extension_dtype",
"elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values) > 0 else np.array([])",
"Sequence[int], allow_fill: bool = False, fill_value: Any = None ) -> \"TensorArray\": \"\"\"",
"on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,",
"(c) 2020 IBM Corp. # Licensed under the Apache License, Version 2.0 (the",
"`Extension Array` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO",
"Any = None ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in",
"as pd from pandas.compat import set_function_name from pandas.core import ops # Internal imports",
"about this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray",
"string == cls.__name__: return cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from",
"memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor =",
"this method. \"\"\" values = self._tensor.take(indices, axis=0) if allow_fill: # From API docs:",
"None ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"axis=0) if len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or",
"as util @pd.api.extensions.register_extension_dtype class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column of",
"tensor must have the same shape. \"\"\" def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous:",
"in compliance with the License. # You may obtain a copy of the",
"operators on underlying ndarray. TODO: would be better to derive from ExtensionOpsMixin, but",
"int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int, np.ndarray], value: Any)",
"License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or",
"'{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\" See docstring in `ExtensionDType` class in",
"slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not",
"# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #",
"from typing import * import numpy as np import pandas as pd from",
"isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if",
"a single row of a TensorArray column.\"\"\" return np.ndarray @property def name(self) ->",
"at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed",
"the License. # # # tensor.py # # Part of text_extensions_for_pandas # #",
"__from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values = ArrowTensorArray.to_numpy(extension_array) return TensorArray(values) class TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin):",
"np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values)",
"pd from pandas.compat import set_function_name from pandas.core import ops # Internal imports import",
"raise NotImplementedError(f\"__setitem__ with key type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See",
"values. These values are set to # `fill_value`. for i in range(len(indices)): if",
"information about this method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def _reduce(self,",
"self._tensor def __len__(self) -> int: return len(self._tensor) def __getitem__(self, item) -> \"TensorArray\": \"\"\"",
"equal shape. # from typing import * import numpy as np import pandas",
"\"\"\" if isinstance(values, np.ndarray): self._tensor = values elif isinstance(values, Sequence): self._tensor = np.stack(values,",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key] =",
"if isinstance(key, (int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key type",
"See the License for the specific language governing permissions and # limitations under",
"represents a column of `numpy.ndarray`s, or tensors, where the outer dimension is the",
"BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.",
"\"[If allow_fill == True, then] negative values in # `indices` indicate missing values.",
"to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"the count of tensors in the column. Each tensor must have the same",
"support columns of N-dimensional tensors of equal shape. # from typing import *",
"information about this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import",
"a column of tensors with the same shape. \"\"\" @property def type(self): \"\"\"The",
"derive from ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): #",
"a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required",
"TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties too return ret def take(",
"converts series with np.asarray, then applied a function e.g. map_infer(array, is_float) to format",
"# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in",
"for scalar item, or TensorArray for slice if isinstance(item, int): return self._tensor[item] else:",
"extensions to support columns of N-dimensional tensors of equal shape. # from typing",
"def __init__(self, values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values: A",
"for a column of tensors with the same shape. \"\"\" @property def type(self):",
"if string == cls.__name__: return cls() else: raise TypeError( f\"Cannot construct a '{cls.__name__}'",
"type for a column of tensors with the same shape. \"\"\" @property def",
"cached properties too return ret def take( self, indices: Sequence[int], allow_fill: bool =",
"def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray`",
"method bogus class names. if string == cls.__name__: return cls() else: raise TypeError(",
"key type '{type(key)}' \" f\"not implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray`",
"to be contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values elif",
"to derive from ExtensionOpsMixin, but not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True):",
"values in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if name == \"sum\":",
"not be needed def _binop(self, other): lvalues = self._tensor rvalues = other._tensor if",
"if allow_fill: # From API docs: \"[If allow_fill == True, then] negative values",
"\"\"\" if name == \"sum\": return TensorArray(np.sum(self._tensor, axis=0)) else: raise NotImplementedError(f\"'{name}' aggregate not",
"specific language governing permissions and # limitations under the License. # # #",
"this method. \"\"\" # TODO options return self._tensor def __len__(self) -> int: return",
"slice if isinstance(item, int): return self._tensor[item] else: return TensorArray(self._tensor[item]) def __setitem__(self, key: Union[int,",
"isinstance(key, (int, slice)): self._tensor[key] = value else: raise NotImplementedError(f\"__setitem__ with key type '{type(key)}'",
"\"TensorType\" @classmethod def construct_from_string(cls, string: str): \"\"\" See docstring in `ExtensionDType` class in",
"method. \"\"\" # TODO pandas converts series with np.asarray, then applied a function",
"A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape. :param make_contiguous: force values",
"about this method. \"\"\" # Upstream code uses exceptions as part of its",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" if name == \"sum\": return",
"not available \"\"\" @classmethod def _create_method(cls, op, coerce_to_dtype=True): # NOTE: this overrides, but",
"values to be contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor = values",
"# from typing import * import numpy as np import pandas as pd",
"as np import pandas as pd from pandas.compat import set_function_name from pandas.core import",
"force values to be contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor =",
"Version 2.0 (the \"License\"); # you may not use this file except in",
"tensors of equal shape. # from typing import * import numpy as np",
"except in compliance with the License. # You may obtain a copy of",
"\"\"\" # Upstream code uses exceptions as part of its normal control flow",
"@property def name(self) -> str: \"\"\"A string representation of the dtype.\"\"\" return \"TensorType\"",
"self._tensor = np.stack(values, axis=0) if len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected",
"np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence of numpy.ndarray, \" f\"but received",
"return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default): \"\"\" See docstring in `ExtensionArray` class",
"\"\"\" # TODO pandas converts series with np.asarray, then applied a function e.g.",
"to provide operators on underlying ndarray. TODO: would be better to derive from",
"`pandas/core/arrays/base.py` for information about this method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__()",
"type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def",
"equal shape. :param make_contiguous: force values to be contiguous in memory \"\"\" if",
"in row nan? return np.any(np.isnan(self._tensor), axis=1) def copy(self) -> \"TensorArray\": \"\"\" See docstring",
"# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0",
"may not use this file except in compliance with the License. # You",
"License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS",
"method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from text_extensions_for_pandas.array.arrow_conversion import ArrowTensorArray values =",
"class in `pandas/core/arrays/base.py` for information about this method. \"\"\" ret = TensorArray( self._tensor.copy(),",
"this method. \"\"\" ret = TensorArray( self._tensor.copy(), ) # TODO: Copy cached properties",
"`ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" if isinstance(key, (int,",
"etc. # Return an ndarray for scalar item, or TensorArray for slice if",
"for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor for a in to_concat])) def",
"if len(values) > 0 else np.array([]) else: raise TypeError(f\"Expected a numpy.ndarray or sequence",
"\"\"\" @property def type(self): \"\"\"The type for a single row of a TensorArray",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" # TODO",
"`pandas/core/dtypes/base.py` for information about this method. \"\"\" return TensorArray def __from_arrow__(self, extension_array): from",
"with the same shape. \"\"\" @property def type(self): \"\"\"The type for a single",
"'{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type(",
"{values} \" f\"of type '{type(values)}' instead.\") if not self._tensor.flags.c_contiguous and make_contiguous: self._tensor =",
"values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values) > 0 else",
"the specific language governing permissions and # limitations under the License. # #",
"make_contiguous: self._tensor = np.ascontiguousarray(self._tensor) @classmethod def _concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\":",
"# From API docs: \"[If allow_fill == True, then] negative values in #",
"# Pandas extensions to support columns of N-dimensional tensors of equal shape. #",
"2020 IBM Corp. # Licensed under the Apache License, Version 2.0 (the \"License\");",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" ret = TensorArray( self._tensor.copy(), )",
"else: raise TypeError( f\"Cannot construct a '{cls.__name__}' from '{string}'\") @classmethod def construct_array_type(cls): \"\"\"",
"the column. Each tensor must have the same shape. \"\"\" def __init__(self, values:",
"= values elif isinstance(values, Sequence): self._tensor = np.stack(values, axis=0) if len(values) > 0",
"if isinstance(other, TensorArray) else other res = op(lvalues, rvalues) return TensorArray(res) op_name =",
"fill value to the shape # of each row. values[i] = fill_value return",
"< 0: # Note that Numpy will broadcast the fill value to the",
"of its normal control flow and # will pass this method bogus class",
"make_contiguous: force values to be contiguous in memory \"\"\" if isinstance(values, np.ndarray): self._tensor",
"axis=0) if allow_fill: # From API docs: \"[If allow_fill == True, then] negative",
"return ret def take( self, indices: Sequence[int], allow_fill: bool = False, fill_value: Any",
"for information about this method. \"\"\" return TensorType() def to_numpy(self, dtype=None, copy=False, na_value=pd.api.extensions.no_default):",
"the same shape. \"\"\" @property def type(self): \"\"\"The type for a single row",
"value: Any) -> None: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for",
"for information about this method. \"\"\" # Upstream code uses exceptions as part",
"TensorOpsMixin(pd.api.extensions.ExtensionScalarOpsMixin): \"\"\" Mixin to provide operators on underlying ndarray. TODO: would be better",
"for information about this method. \"\"\" return self._tensor.__repr__() def __str__(self): return self._tensor.__str__() def",
"# Return an ndarray for scalar item, or TensorArray for slice if isinstance(item,",
"that Numpy will broadcast the fill value to the shape # of each",
"set to # `fill_value`. for i in range(len(indices)): if indices[i] < 0: #",
"strings etc. # Return an ndarray for scalar item, or TensorArray for slice",
"values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape. :param make_contiguous: force",
"in `ExtensionArray` class in `pandas/core/arrays/base.py` for information about this method. \"\"\" return TensorArray(np.concatenate([a._tensor",
"`fill_value`. for i in range(len(indices)): if indices[i] < 0: # Note that Numpy",
"in `pandas/core/arrays/base.py` for information about this method. \"\"\" if isinstance(key, (int, slice)): self._tensor[key]",
"in `ExtensionDType` class in `pandas/core/dtypes/base.py` for information about this method. \"\"\" return TensorArray",
"distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT",
"class TensorType(pd.api.extensions.ExtensionDtype): \"\"\" Pandas data type for a column of tensors with the",
"about this method. \"\"\" # TODO any or all values in row nan?",
"False, fill_value: Any = None ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray`",
"dtype(self) -> pd.api.extensions.ExtensionDtype: \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"= op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True) return set_function_name(_binop, op_name, cls)",
"control flow and # will pass this method bogus class names. if string",
"normal control flow and # will pass this method bogus class names. if",
"TensorArray) else other res = op(lvalues, rvalues) return TensorArray(res) op_name = ops._get_op_name(op, True)",
"_concat_same_type( cls, to_concat: Sequence[\"TensorArray\"] ) -> \"TensorArray\": \"\"\" See docstring in `ExtensionArray` class",
"implemented\") def __repr__(self): \"\"\" See docstring in `ExtensionArray` class in `pandas/core/arrays/base.py` for information",
"True): \"\"\" :param values: A `numpy.ndarray` or sequence of `numpy.ndarray`s of equal shape.",
"# Copyright (c) 2020 IBM Corp. # Licensed under the Apache License, Version",
"str: \"\"\"A string representation of the dtype.\"\"\" return \"TensorType\" @classmethod def construct_from_string(cls, string:",
"import pandas as pd from pandas.compat import set_function_name from pandas.core import ops #",
"values: Union[np.ndarray, Sequence[np.ndarray]], make_contiguous: bool = True): \"\"\" :param values: A `numpy.ndarray` or"
] |
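# Usage sketch (not part of the original module): a minimal example of the
# TensorArray API defined above. The values and variable names below are
# illustrative assumptions, not code from the project.
import numpy as np
import pandas as pd

arr = TensorArray(np.arange(12).reshape(4, 3))   # 4 rows, each of shape (3,)
print(len(arr))       # 4
print(arr.dtype)      # TensorType
print(arr[1])         # [3 4 5] (a scalar index returns a plain ndarray)
s = pd.Series(arr)    # an ExtensionArray can back a pandas Series directly
# take() with allow_fill=True broadcasts fill_value over the rows selected by
# negative indices:
print(arr.take([0, -1], allow_fill=True, fill_value=0)._tensor)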
#!/usr/bin/env python

# Copyright 2015 Nicta
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup
import os

package_name = 'bdkd-laser-data'
webdir = 'wsgi'

# One data_files entry per directory under wsgi/: (target directory under the
# package name, list of source files in that directory).
datafiles = [(os.path.join(package_name, root), [os.path.join(root, f) for f in files])
             for root, dirs, files in os.walk(webdir)]

setup(
    name=package_name,
    version='0.1.0',
    description='Access dataset data',
    author='Sirca Ltd',
    author_email='<EMAIL>',
    url='http://github.com/sirca/bdkd',
    package_dir={'': 'lib'},
    packages=['bdkd.laser', 'bdkd.laser.util'],
    data_files=datafiles,
    scripts=[
        'bin/pack_maps.py',
        'bin/pack_raw.py',
    ],
    entry_points={
        'console_scripts': [
            'datastore-add-laser = bdkd.laser.util.add:add_laser_util',
        ],
    },
    install_requires=['boto', 'PyYAML', 'bdkd-datastore', 'h5py']
)
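# Illustration (not from the original file): for a hypothetical tree
#
#   wsgi/index.html
#   wsgi/static/app.js
#
# the datafiles comprehension above evaluates to
#
#   [('bdkd-laser-data/wsgi', ['wsgi/index.html']),
#    ('bdkd-laser-data/wsgi/static', ['wsgi/static/app.js'])]
#
# so each directory found under wsgi/ is recreated under the package name at
# install time, populated with the files it contains.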
from datetime import datetime

from ... import db


class Story(db.Model):
    """ This model holds information about Story """
    __tablename__ = 'story'

    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.Text, nullable=False)
    content = db.Column(db.Text, nullable=False)
    featured_img_url = db.Column(db.Text, nullable=False)
    approved_count = db.Column(db.Integer, nullable=False)
    fake_count = db.Column(db.Integer, nullable=False)
    mixedvote_count = db.Column(db.Integer, nullable=False)
    # Pass the callable (not datetime.now()) so the default is evaluated per insert,
    # not once at import time.
    date_added = db.Column(db.Text, default=datetime.now)

    def __init__(self, title, content, featured_img_url, approved_count, fake_count, mixedvote_count):
        """ Initialize the instance """
        self.title = title
        self.content = content
        self.featured_img_url = featured_img_url
        self.approved_count = approved_count
        self.fake_count = fake_count
        self.mixedvote_count = mixedvote_count

    def __repr__(self):
        """ Returns the object representation """
        return '<Story %r>' % self.content

    def to_json(self):
        """ Returns a JSON object

        :return: story JSON object
        """
        json_story = {
            'title': self.title,
            'content': self.content,
            'featured_img_url': self.featured_img_url,
            'approved_count': self.approved_count,
            'fake_count': self.fake_count,
            'mixedvote_count': self.mixedvote_count,
            'date_added': self.date_added
        }
        return json_story

    def save(self):
        """ Save a story to the database.

        This includes creating a new story and editing one.
        """
        db.session.add(self)
        db.session.commit()
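A minimal usage sketch for the model above, assuming `app` is a configured Flask application and `db` is the Flask-SQLAlchemy instance the module imports (both assumptions, not shown in the original code):

# Illustrative only: requires an application context and an existing schema
# (db.create_all() or a migration) before the insert will succeed.
with app.app_context():
    story = Story(
        title='First post',
        content='Hello, world',
        featured_img_url='https://example.com/cover.png',
        approved_count=0,
        fake_count=0,
        mixedvote_count=0,
    )
    story.save()            # adds the row to the session and commits
    print(story.to_json())  # plain dict, ready for flask.jsonify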
from rest_framework import serializers

from shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer


class PoliceUnitMobileSerializer(NoNullSerializer):
    unit_id = serializers.IntegerField(source='id')
    unit_name = serializers.CharField()
    description = serializers.CharField()


class OfficerInfoMobileSerializer(NoNullSerializer):
    officer_id = serializers.IntegerField(source='id')
    full_name = serializers.CharField()
    unit = PoliceUnitMobileSerializer(source='last_unit')
    date_of_appt = serializers.DateField(source='appointed_date', format='%Y-%m-%d')
    date_of_resignation = serializers.DateField(source='resignation_date', format='%Y-%m-%d')
    active = serializers.SerializerMethodField()
    rank = serializers.CharField()
    race = serializers.CharField()
    birth_year = serializers.IntegerField()
    badge = serializers.SerializerMethodField()
    historic_badges = serializers.ListField(child=serializers.CharField())
    gender = serializers.CharField(source='gender_display')
    percentiles = serializers.SerializerMethodField()
    allegation_count = serializers.IntegerField()
    percentile_allegation = serializers.DecimalField(
        source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True
    )
    percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True)
    honorable_mention_count = serializers.IntegerField()
    sustained_count = serializers.IntegerField()
    unsustained_count = serializers.IntegerField()
    discipline_count = serializers.IntegerField()
    civilian_compliment_count = serializers.IntegerField()
    trr_count = serializers.IntegerField()
    major_award_count = serializers.IntegerField()
    honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True)

    def get_percentiles(self, obj):
        yearly_percentiles = obj.officeryearlypercentile_set.order_by('year')
        return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data

    def get_active(self, obj):
        return obj.get_active_display()

    def get_badge(self, obj):
        return obj.current_badge or ''


class BaseTimelineMobileSerializer(NoNullSerializer):
    # Every timeline entry exposes the same sort keys; subclasses supply the
    # concrete kind string and priority constant.
    unit_name = serializers.SerializerMethodField()
    unit_description = serializers.SerializerMethodField()
    rank = serializers.SerializerMethodField()
    priority_sort = serializers.SerializerMethodField()
    kind = serializers.SerializerMethodField()

    def get_kind(self, obj):
        raise NotImplementedError

    def get_priority_sort(self, obj):
        raise NotImplementedError

    def get_unit_name(self, obj):
        return obj.unit_name if obj.unit_name else ''

    def get_unit_description(self, obj):
        return obj.unit_description if obj.unit_description else ''

    def get_rank(self, obj):
        return obj.rank_name


class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    date_sort = serializers.DateField(source='spp_date', format=None)
    date = serializers.DateField(source='spp_date', format='%Y-%m-%d')

    def get_kind(self, obj):
        return 'RANK_CHANGE'

    def get_priority_sort(self, obj):
        return 25

    def get_rank(self, obj):
        return obj.rank


class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    date_sort = serializers.DateField(source='appointed_date', format=None)
    date = serializers.DateField(source='appointed_date', format='%Y-%m-%d')

    def get_kind(self, obj):
        return 'JOINED'

    def get_priority_sort(self, obj):
        return 10


class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    date_sort = serializers.DateField(source='effective_date', format=None)
    date = serializers.DateField(source='effective_date', format='%Y-%m-%d')

    def get_kind(self, obj):
        return 'UNIT_CHANGE'

    def get_priority_sort(self, obj):
        return 20


class VictimMobileSerializer(NoNullSerializer):
    gender = serializers.CharField(source='gender_display')
    race = serializers.CharField()
    age = serializers.IntegerField()


class AttachmentFileMobileSerializer(NoNullSerializer):
    title = serializers.CharField()
    url = serializers.CharField()
    preview_image_url = serializers.CharField()
    file_type = serializers.CharField()
    id = serializers.CharField()


class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    date_sort = serializers.SerializerMethodField()
    date = serializers.SerializerMethodField()
    crid = serializers.CharField()
    category = serializers.SerializerMethodField()
    subcategory = serializers.CharField()
    finding = serializers.CharField(source='final_finding_display')
    outcome = serializers.CharField(source='final_outcome')
    coaccused = serializers.IntegerField(source='coaccused_count')
    attachments = serializers.SerializerMethodField()
    point = serializers.SerializerMethodField()
    victims = VictimMobileSerializer(many=True)

    def get_date_sort(self, obj):
        return obj.allegation.incident_date.date()

    def get_date(self, obj):
        return obj.allegation.incident_date.date().strftime('%Y-%m-%d')

    def get_category(self, obj):
        return obj.category if obj.category else 'Unknown'

    def get_kind(self, obj):
        return 'CR'

    def get_priority_sort(self, obj):
        return 30

    def get_point(self, obj):
        try:
            return {
                'lon': obj.allegation.point.x,
                'lat': obj.allegation.point.y
            }
        except AttributeError:
            return None

    def get_attachments(self, obj):
        return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data


class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    date_sort = serializers.DateField(source='start_date', format=None)
    date = serializers.DateField(source='start_date', format='%Y-%m-%d')
    award_type = serializers.CharField()

    def get_kind(self, obj):
        return 'AWARD'

    def get_priority_sort(self, obj):
        return 40


class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer):
    trr_id = serializers.IntegerField(source='id')
    date_sort = serializers.SerializerMethodField()
    date = serializers.SerializerMethodField()
    taser = serializers.NullBooleanField()
    firearm_used = serializers.NullBooleanField()
    point = serializers.SerializerMethodField()

    def get_kind(self, obj):
        return 'FORCE'

    def get_priority_sort(self, obj):
        return 50

    def get_date_sort(self, obj):
        return obj.trr_datetime.date()

    def get_date(self, obj):
        return obj.trr_datetime.date().strftime('%Y-%m-%d')

    def get_point(self, obj):
        try:
            return {
                'lon': obj.point.x,
                'lat': obj.point.y
            }
        except AttributeError:
            return None


class OfficerPercentileMobileSerializer(NoNullSerializer):
    percentile_trr = serializers.DecimalField(
        source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4)
    percentile_allegation_civilian = serializers.DecimalField(
        source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4)
    percentile_allegation_internal = serializers.DecimalField(
        source='internal_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4)


class CoaccusalCardMobileSerializer(OfficerPercentileSerializer):
    id = serializers.IntegerField()
    full_name = serializers.CharField()
    rank = serializers.CharField()
    coaccusal_count = serializers.IntegerField()


class OfficerCardMobileSerializer(OfficerPercentileSerializer):
    id = serializers.IntegerField()
    full_name = serializers.CharField()
    complaint_count = serializers.IntegerField(source='allegation_count')
"obj.allegation.incident_date.date() def get_date(self, obj): return obj.allegation.incident_date.date().strftime('%Y-%m-%d') def get_category(self, obj): return obj.category if obj.category",
"format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self, obj):",
"serializers.SerializerMethodField() subcategory = serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome') coaccused = serializers.IntegerField(source='coaccused_count')",
"def get_kind(self, obj): return 'FORCE' def get_priority_sort(self, obj): return 50 def get_date_sort(self, obj):",
"get_priority_sort(self, obj): return 50 def get_date_sort(self, obj): return obj.trr_datetime.date() def get_date(self, obj): return",
"class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField() full_name = serializers.CharField() rank = serializers.CharField() coaccusal_count =",
"date = serializers.SerializerMethodField() crid = serializers.CharField() category = serializers.SerializerMethodField() subcategory = serializers.CharField() finding",
"def get_point(self, obj): try: return { 'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y } except AttributeError:",
"'JOINED' def get_priority_sort(self, obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='effective_date', format=None) date",
"victims = VictimMobileSerializer(many=True) def get_date_sort(self, obj): return obj.allegation.incident_date.date() def get_date(self, obj): return obj.allegation.incident_date.date().strftime('%Y-%m-%d')",
"return obj.get_active_display() def get_badge(self, obj): return obj.current_badge or '' class BaseTimelineMobileSerializer(NoNullSerializer): unit_name =",
"gender = serializers.CharField(source='gender_display') race = serializers.CharField() age = serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer): title =",
"category = serializers.SerializerMethodField() subcategory = serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome') coaccused",
"= serializers.IntegerField(source='id') full_name = serializers.CharField() unit = PoliceUnitMobileSerializer(source='last_unit') date_of_appt = serializers.DateField(source='appointed_date', format='%Y-%m-%d') date_of_resignation",
"active = serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField() birth_year = serializers.IntegerField() badge",
"source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True ) percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count =",
"def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self, obj): return 25 def get_rank(self, obj):",
"= serializers.CharField() preview_image_url = serializers.CharField() file_type = serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer):",
"class AttachmentFileMobileSerializer(NoNullSerializer): title = serializers.CharField() url = serializers.CharField() preview_image_url = serializers.CharField() file_type =",
"serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj): return 'FORCE' def get_priority_sort(self, obj): return",
"serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self,",
"return obj.trr_datetime.date() def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self, obj): try: return {",
"obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='start_date', format=None) date = serializers.DateField(source='start_date',",
"date_sort = serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE'",
"= serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj): return 'FORCE' def get_priority_sort(self, obj):",
"def get_kind(self, obj): return 'CR' def get_priority_sort(self, obj): return 30 def get_point(self, obj):",
"honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles,",
"obj): return obj.category if obj.category else 'Unknown' def get_kind(self, obj): return 'CR' def",
"= serializers.IntegerField() unsustained_count = serializers.IntegerField() discipline_count = serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count =",
"serializers.IntegerField() major_award_count = serializers.IntegerField() honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles",
"return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display() def get_badge(self, obj): return obj.current_badge",
"BaseTimelineMobileSerializer(NoNullSerializer): unit_name = serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField()",
"race = serializers.CharField() age = serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer): title = serializers.CharField() url =",
"serializers.DecimalField( source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6,",
"class PoliceUnitMobileSerializer(NoNullSerializer): unit_id = serializers.IntegerField(source='id') unit_name = serializers.CharField() description = serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer):",
"file_type = serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.SerializerMethodField() date =",
"officer_id = serializers.IntegerField(source='id') full_name = serializers.CharField() unit = PoliceUnitMobileSerializer(source='last_unit') date_of_appt = serializers.DateField(source='appointed_date', format='%Y-%m-%d')",
"return 'FORCE' def get_priority_sort(self, obj): return 50 def get_date_sort(self, obj): return obj.trr_datetime.date() def",
"= serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome') coaccused = serializers.IntegerField(source='coaccused_count') attachments =",
"get_category(self, obj): return obj.category if obj.category else 'Unknown' def get_kind(self, obj): return 'CR'",
"serializers.IntegerField() full_name = serializers.CharField() rank = serializers.CharField() coaccusal_count = serializers.IntegerField() class OfficerCardMobileSerializer(OfficerPercentileSerializer): id",
"unit_description = serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField() kind = serializers.SerializerMethodField() def",
"= serializers.CharField() birth_year = serializers.IntegerField() badge = serializers.SerializerMethodField() historic_badges = serializers.ListField(child=serializers.CharField()) gender =",
"def get_unit_name(self, obj): return obj.unit_name if obj.unit_name else '' def get_unit_description(self, obj): return",
"VictimMobileSerializer(many=True) def get_date_sort(self, obj): return obj.allegation.incident_date.date() def get_date(self, obj): return obj.allegation.incident_date.date().strftime('%Y-%m-%d') def get_category(self,",
"get_kind(self, obj): return 'AWARD' def get_priority_sort(self, obj): return 40 class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id =",
"read_only=True, max_digits=6, decimal_places=4) class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField() full_name = serializers.CharField() rank =",
"= serializers.IntegerField() discipline_count = serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count = serializers.IntegerField() major_award_count =",
"obj): return 'CR' def get_priority_sort(self, obj): return 30 def get_point(self, obj): try: return",
"= serializers.DecimalField( source='internal_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField() full_name",
"obj): raise NotImplementedError def get_unit_name(self, obj): return obj.unit_name if obj.unit_name else '' def",
"40 class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id') date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser",
"} except AttributeError: return None def get_attachments(self, obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer):",
"return obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d') def",
"serializers.CharField() category = serializers.SerializerMethodField() subcategory = serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome')",
"'lat': obj.allegation.point.y } except AttributeError: return None def get_attachments(self, obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data",
"url = serializers.CharField() preview_image_url = serializers.CharField() file_type = serializers.CharField() id = serializers.CharField() class",
"= serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self, obj): return 25",
"date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE' def get_priority_sort(self, obj): return",
"return 'UNIT_CHANGE' def get_priority_sort(self, obj): return 20 class VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race",
"10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self,",
"format=None) date = serializers.DateField(source='start_date', format='%Y-%m-%d') award_type = serializers.CharField() def get_kind(self, obj): return 'AWARD'",
"get_point(self, obj): try: return { 'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y } except AttributeError: return",
"return obj.unit_name if obj.unit_name else '' def get_unit_description(self, obj): return obj.unit_description if obj.unit_description",
"= serializers.CharField() coaccusal_count = serializers.IntegerField() class OfficerCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField() full_name = serializers.CharField()",
"serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE' def get_priority_sort(self,",
"VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race = serializers.CharField() age = serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer): title",
"percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_internal = serializers.DecimalField( source='internal_allegation_percentile', allow_null=True,",
"= serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj): return 'FORCE'",
"class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id') date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser =",
"= serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count = serializers.IntegerField() discipline_count = serializers.IntegerField() civilian_compliment_count =",
"allegation_count = serializers.IntegerField() percentile_allegation = serializers.DecimalField( source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True ) percentile_trr =",
"= serializers.SerializerMethodField() subcategory = serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome') coaccused =",
"try: return { 'lon': obj.point.x, 'lat': obj.point.y } except AttributeError: return None class",
"if obj.unit_description else '' def get_rank(self, obj): return obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort =",
"raise NotImplementedError def get_priority_sort(self, obj): raise NotImplementedError def get_unit_name(self, obj): return obj.unit_name if",
"get_rank(self, obj): return obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date',",
"TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id') date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser = serializers.NullBooleanField()",
"allow_null=True) def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj):",
"serializers.IntegerField(source='id') unit_name = serializers.CharField() description = serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id') full_name",
"obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self,",
"= serializers.IntegerField(source='id') unit_name = serializers.CharField() description = serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id')",
"= serializers.DecimalField( source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True,",
"serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count = serializers.IntegerField() major_award_count = serializers.IntegerField() honorable_mention_percentile = serializers.DecimalField(max_digits=6,",
"full_name = serializers.CharField() unit = PoliceUnitMobileSerializer(source='last_unit') date_of_appt = serializers.DateField(source='appointed_date', format='%Y-%m-%d') date_of_resignation = serializers.DateField(source='resignation_date',",
"= serializers.IntegerField() major_award_count = serializers.IntegerField() honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj):",
"obj.get_active_display() def get_badge(self, obj): return obj.current_badge or '' class BaseTimelineMobileSerializer(NoNullSerializer): unit_name = serializers.SerializerMethodField()",
"<filename>cpdb/officers/serializers/response_mobile_serializers.py from rest_framework import serializers from shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer):",
"return obj.allegation.incident_date.date().strftime('%Y-%m-%d') def get_category(self, obj): return obj.category if obj.category else 'Unknown' def get_kind(self,",
"shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer): unit_id = serializers.IntegerField(source='id') unit_name = serializers.CharField()",
"= serializers.IntegerField(source='coaccused_count') attachments = serializers.SerializerMethodField() point = serializers.SerializerMethodField() victims = VictimMobileSerializer(many=True) def get_date_sort(self,",
"= serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField() birth_year = serializers.IntegerField() badge =",
"def get_date(self, obj): return obj.allegation.incident_date.date().strftime('%Y-%m-%d') def get_category(self, obj): return obj.category if obj.category else",
"def get_priority_sort(self, obj): return 20 class VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race = serializers.CharField()",
"decimal_places=4, allow_null=True ) percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count",
"source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4)",
"get_kind(self, obj): return 'JOINED' def get_priority_sort(self, obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort =",
"rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField() kind = serializers.SerializerMethodField() def get_kind(self, obj): raise",
"else 'Unknown' def get_kind(self, obj): return 'CR' def get_priority_sort(self, obj): return 30 def",
"trr_id = serializers.IntegerField(source='id') date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used",
"obj): return obj.current_badge or '' class BaseTimelineMobileSerializer(NoNullSerializer): unit_name = serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField()",
"major_award_count = serializers.IntegerField() honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles =",
"def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self, obj): try: return { 'lon': obj.point.x,",
"get_kind(self, obj): return 'FORCE' def get_priority_sort(self, obj): return 50 def get_date_sort(self, obj): return",
"get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self, obj): try: return { 'lon': obj.point.x, 'lat':",
"= serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count = serializers.IntegerField() major_award_count = serializers.IntegerField() honorable_mention_percentile =",
"serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE' def get_priority_sort(self, obj): return 20 class",
"= serializers.SerializerMethodField() date = serializers.SerializerMethodField() crid = serializers.CharField() category = serializers.SerializerMethodField() subcategory =",
"obj): return 'AWARD' def get_priority_sort(self, obj): return 40 class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id')",
"get_date_sort(self, obj): return obj.trr_datetime.date() def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self, obj): try:",
"= serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() crid = serializers.CharField()",
"percentile_allegation_internal = serializers.DecimalField( source='internal_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField()",
"else '' def get_unit_description(self, obj): return obj.unit_description if obj.unit_description else '' def get_rank(self,",
"def get_priority_sort(self, obj): return 25 def get_rank(self, obj): return obj.rank class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort",
"obj): return obj.unit_name if obj.unit_name else '' def get_unit_description(self, obj): return obj.unit_description if",
"= serializers.DateField(source='appointed_date', format='%Y-%m-%d') date_of_resignation = serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank = serializers.CharField()",
"except AttributeError: return None class OfficerPercentileMobileSerializer(NoNullSerializer): percentile_trr = serializers.DecimalField( source='trr_percentile', allow_null=True, read_only=True, max_digits=6,",
"percentile_trr = serializers.DecimalField( source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True,",
"= serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_internal = serializers.DecimalField( source='internal_allegation_percentile', allow_null=True, read_only=True,",
"= serializers.DateField(source='start_date', format=None) date = serializers.DateField(source='start_date', format='%Y-%m-%d') award_type = serializers.CharField() def get_kind(self, obj):",
"date = serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def",
"from rest_framework import serializers from shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer): unit_id",
"serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def",
"def get_rank(self, obj): return obj.rank class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='appointed_date', format=None) date =",
"'CR' def get_priority_sort(self, obj): return 30 def get_point(self, obj): try: return { 'lon':",
"return obj.current_badge or '' class BaseTimelineMobileSerializer(NoNullSerializer): unit_name = serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField() rank",
"get_priority_sort(self, obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date',",
"= serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count =",
"class OfficerPercentileMobileSerializer(NoNullSerializer): percentile_trr = serializers.DecimalField( source='trr_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField(",
"obj.unit_name else '' def get_unit_description(self, obj): return obj.unit_description if obj.unit_description else '' def",
"decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_internal = serializers.DecimalField( source='internal_allegation_percentile',",
"= serializers.SerializerMethodField() kind = serializers.SerializerMethodField() def get_kind(self, obj): raise NotImplementedError def get_priority_sort(self, obj):",
"= serializers.CharField() race = serializers.CharField() birth_year = serializers.IntegerField() badge = serializers.SerializerMethodField() historic_badges =",
"gender = serializers.CharField(source='gender_display') percentiles = serializers.SerializerMethodField() allegation_count = serializers.IntegerField() percentile_allegation = serializers.DecimalField( source='complaint_percentile',",
"= obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display() def get_badge(self, obj):",
"if obj.unit_name else '' def get_unit_description(self, obj): return obj.unit_description if obj.unit_description else ''",
"def get_priority_sort(self, obj): return 30 def get_point(self, obj): try: return { 'lon': obj.allegation.point.x,",
"get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display()",
"AttributeError: return None def get_attachments(self, obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort =",
"serializers.DateField(source='appointed_date', format='%Y-%m-%d') date_of_resignation = serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank = serializers.CharField() race",
"OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id') full_name = serializers.CharField() unit = PoliceUnitMobileSerializer(source='last_unit') date_of_appt = serializers.DateField(source='appointed_date',",
"50 def get_date_sort(self, obj): return obj.trr_datetime.date() def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self,",
"return 50 def get_date_sort(self, obj): return obj.trr_datetime.date() def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def",
"max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count = serializers.IntegerField() discipline_count",
"= serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id') full_name = serializers.CharField() unit = PoliceUnitMobileSerializer(source='last_unit')",
"get_unit_description(self, obj): return obj.unit_description if obj.unit_description else '' def get_rank(self, obj): return obj.rank_name",
"obj): return obj.unit_description if obj.unit_description else '' def get_rank(self, obj): return obj.rank_name class",
"'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y } except AttributeError: return None def get_attachments(self, obj): return",
"= serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField()",
"obj): raise NotImplementedError def get_priority_sort(self, obj): raise NotImplementedError def get_unit_name(self, obj): return obj.unit_name",
"rest_framework import serializers from shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer): unit_id =",
"decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self,",
"serializers.CharField() file_type = serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.SerializerMethodField() date",
"= serializers.DateField(source='appointed_date', format='%Y-%m-%d') def get_kind(self, obj): return 'JOINED' def get_priority_sort(self, obj): return 10",
"serializers.IntegerField() percentile_allegation = serializers.DecimalField( source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True ) percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6,",
"= serializers.CharField(source='gender_display') percentiles = serializers.SerializerMethodField() allegation_count = serializers.IntegerField() percentile_allegation = serializers.DecimalField( source='complaint_percentile', max_digits=6,",
"= serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField() birth_year",
"'' def get_unit_description(self, obj): return obj.unit_description if obj.unit_description else '' def get_rank(self, obj):",
"serializers.DecimalField( source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True ) percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count",
"priority_sort = serializers.SerializerMethodField() kind = serializers.SerializerMethodField() def get_kind(self, obj): raise NotImplementedError def get_priority_sort(self,",
"serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj):",
"honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count = serializers.IntegerField() discipline_count = serializers.IntegerField() civilian_compliment_count",
"date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point",
"= serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def",
"percentiles = serializers.SerializerMethodField() allegation_count = serializers.IntegerField() percentile_allegation = serializers.DecimalField( source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True",
"percentile_allegation = serializers.DecimalField( source='complaint_percentile', max_digits=6, decimal_places=4, allow_null=True ) percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4,",
"NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer): unit_id = serializers.IntegerField(source='id') unit_name = serializers.CharField() description =",
"return 20 class VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race = serializers.CharField() age = serializers.IntegerField()",
"= serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self,",
"serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField() birth_year =",
"obj): return obj.get_active_display() def get_badge(self, obj): return obj.current_badge or '' class BaseTimelineMobileSerializer(NoNullSerializer): unit_name",
"subcategory = serializers.CharField() finding = serializers.CharField(source='final_finding_display') outcome = serializers.CharField(source='final_outcome') coaccused = serializers.IntegerField(source='coaccused_count') attachments",
"return 'CR' def get_priority_sort(self, obj): return 30 def get_point(self, obj): try: return {",
"get_kind(self, obj): return 'CR' def get_priority_sort(self, obj): return 30 def get_point(self, obj): try:",
"def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return",
"if obj.category else 'Unknown' def get_kind(self, obj): return 'CR' def get_priority_sort(self, obj): return",
"obj.rank class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='appointed_date', format=None) date = serializers.DateField(source='appointed_date', format='%Y-%m-%d') def get_kind(self,",
"serializers.SerializerMethodField() victims = VictimMobileSerializer(many=True) def get_date_sort(self, obj): return obj.allegation.incident_date.date() def get_date(self, obj): return",
"date_sort = serializers.DateField(source='start_date', format=None) date = serializers.DateField(source='start_date', format='%Y-%m-%d') award_type = serializers.CharField() def get_kind(self,",
"description = serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id') full_name = serializers.CharField() unit =",
"serializers.DateField(source='start_date', format='%Y-%m-%d') award_type = serializers.CharField() def get_kind(self, obj): return 'AWARD' def get_priority_sort(self, obj):",
"return 30 def get_point(self, obj): try: return { 'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y }",
"taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj): return",
"return obj.unit_description if obj.unit_description else '' def get_rank(self, obj): return obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer):",
"serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer): title = serializers.CharField() url = serializers.CharField() preview_image_url = serializers.CharField() file_type",
"date_of_resignation = serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField()",
"obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d')",
"title = serializers.CharField() url = serializers.CharField() preview_image_url = serializers.CharField() file_type = serializers.CharField() id",
"= serializers.SerializerMethodField() victims = VictimMobileSerializer(many=True) def get_date_sort(self, obj): return obj.allegation.incident_date.date() def get_date(self, obj):",
"def get_attachments(self, obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='start_date', format=None) date",
"unit_name = serializers.CharField() description = serializers.CharField() class OfficerInfoMobileSerializer(NoNullSerializer): officer_id = serializers.IntegerField(source='id') full_name =",
"NotImplementedError def get_priority_sort(self, obj): raise NotImplementedError def get_unit_name(self, obj): return obj.unit_name if obj.unit_name",
"= serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField() kind = serializers.SerializerMethodField() def get_kind(self,",
"serializers.DateField(source='spp_date', format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self, obj): return 25 def",
"yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display() def get_badge(self,",
"obj): return 30 def get_point(self, obj): try: return { 'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y",
"from shared.serializer import NoNullSerializer, OfficerPercentileSerializer, OfficerYearlyPercentileSerializer class PoliceUnitMobileSerializer(NoNullSerializer): unit_id = serializers.IntegerField(source='id') unit_name =",
"serializers.SerializerMethodField() date = serializers.SerializerMethodField() crid = serializers.CharField() category = serializers.SerializerMethodField() subcategory = serializers.CharField()",
"{ 'lon': obj.point.x, 'lat': obj.point.y } except AttributeError: return None class OfficerPercentileMobileSerializer(NoNullSerializer): percentile_trr",
"obj.category if obj.category else 'Unknown' def get_kind(self, obj): return 'CR' def get_priority_sort(self, obj):",
"obj): return 'FORCE' def get_priority_sort(self, obj): return 50 def get_date_sort(self, obj): return obj.trr_datetime.date()",
"format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE' def get_priority_sort(self, obj):",
"historic_badges = serializers.ListField(child=serializers.CharField()) gender = serializers.CharField(source='gender_display') percentiles = serializers.SerializerMethodField() allegation_count = serializers.IntegerField() percentile_allegation",
"return 40 class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id') date_sort = serializers.SerializerMethodField() date = serializers.SerializerMethodField()",
"'lon': obj.point.x, 'lat': obj.point.y } except AttributeError: return None class OfficerPercentileMobileSerializer(NoNullSerializer): percentile_trr =",
"OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display() def get_badge(self, obj): return obj.current_badge or",
"= serializers.DateField(source='effective_date', format=None) date = serializers.DateField(source='effective_date', format='%Y-%m-%d') def get_kind(self, obj): return 'UNIT_CHANGE' def",
"unit_name = serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField() kind",
"return 'RANK_CHANGE' def get_priority_sort(self, obj): return 25 def get_rank(self, obj): return obj.rank class",
"decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count = serializers.IntegerField() discipline_count =",
"= serializers.CharField() age = serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer): title = serializers.CharField() url = serializers.CharField()",
"obj.officeryearlypercentile_set.order_by('year') return OfficerYearlyPercentileSerializer(yearly_percentiles, many=True).data def get_active(self, obj): return obj.get_active_display() def get_badge(self, obj): return",
"get_priority_sort(self, obj): return 30 def get_point(self, obj): try: return { 'lon': obj.allegation.point.x, 'lat':",
"obj): return obj.trr_datetime.date() def get_date(self, obj): return obj.trr_datetime.date().strftime('%Y-%m-%d') def get_point(self, obj): try: return",
"allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_civilian = serializers.DecimalField( source='civilian_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) percentile_allegation_internal",
"serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort = serializers.SerializerMethodField() kind = serializers.SerializerMethodField()",
"id = serializers.IntegerField() full_name = serializers.CharField() rank = serializers.CharField() coaccusal_count = serializers.IntegerField() class",
"outcome = serializers.CharField(source='final_outcome') coaccused = serializers.IntegerField(source='coaccused_count') attachments = serializers.SerializerMethodField() point = serializers.SerializerMethodField() victims",
"serializers.CharField() preview_image_url = serializers.CharField() file_type = serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort",
"AttachmentFileMobileSerializer(NoNullSerializer): title = serializers.CharField() url = serializers.CharField() preview_image_url = serializers.CharField() file_type = serializers.CharField()",
"serializers.SerializerMethodField() def get_kind(self, obj): return 'FORCE' def get_priority_sort(self, obj): return 50 def get_date_sort(self,",
") percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField()",
"= serializers.CharField(source='final_outcome') coaccused = serializers.IntegerField(source='coaccused_count') attachments = serializers.SerializerMethodField() point = serializers.SerializerMethodField() victims =",
"obj): return obj.rank_name class RankChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='spp_date', format=None) date = serializers.DateField(source='spp_date', format='%Y-%m-%d')",
"class BaseTimelineMobileSerializer(NoNullSerializer): unit_name = serializers.SerializerMethodField() unit_description = serializers.SerializerMethodField() rank = serializers.SerializerMethodField() priority_sort =",
"preview_image_url = serializers.CharField() file_type = serializers.CharField() id = serializers.CharField() class CRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort =",
"obj): return 25 def get_rank(self, obj): return obj.rank class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='appointed_date',",
"date_of_appt = serializers.DateField(source='appointed_date', format='%Y-%m-%d') date_of_resignation = serializers.DateField(source='resignation_date', format='%Y-%m-%d') active = serializers.SerializerMethodField() rank =",
"serializers.DecimalField( source='internal_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id = serializers.IntegerField() full_name =",
"serializers.IntegerField() discipline_count = serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count = serializers.IntegerField() major_award_count = serializers.IntegerField()",
"class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='appointed_date', format=None) date = serializers.DateField(source='appointed_date', format='%Y-%m-%d') def get_kind(self, obj):",
"def get_kind(self, obj): return 'UNIT_CHANGE' def get_priority_sort(self, obj): return 20 class VictimMobileSerializer(NoNullSerializer): gender",
"serializers.IntegerField() honorable_mention_percentile = serializers.DecimalField(max_digits=6, decimal_places=4, allow_null=True) def get_percentiles(self, obj): yearly_percentiles = obj.officeryearlypercentile_set.order_by('year') return",
"obj.allegation.point.y } except AttributeError: return None def get_attachments(self, obj): return AttachmentFileMobileSerializer(obj.allegation.prefetch_filtered_attachments, many=True).data class",
"discipline_count = serializers.IntegerField() civilian_compliment_count = serializers.IntegerField() trr_count = serializers.IntegerField() major_award_count = serializers.IntegerField() honorable_mention_percentile",
"format='%Y-%m-%d') def get_kind(self, obj): return 'RANK_CHANGE' def get_priority_sort(self, obj): return 25 def get_rank(self,",
"obj): return obj.allegation.incident_date.date() def get_date(self, obj): return obj.allegation.incident_date.date().strftime('%Y-%m-%d') def get_category(self, obj): return obj.category",
"max_digits=6, decimal_places=4) percentile_allegation_internal = serializers.DecimalField( source='internal_allegation_percentile', allow_null=True, read_only=True, max_digits=6, decimal_places=4) class CoaccusalCardMobileSerializer(OfficerPercentileSerializer): id",
"= serializers.CharField() def get_kind(self, obj): return 'AWARD' def get_priority_sort(self, obj): return 40 class",
"def get_category(self, obj): return obj.category if obj.category else 'Unknown' def get_kind(self, obj): return",
"format='%Y-%m-%d') def get_kind(self, obj): return 'JOINED' def get_priority_sort(self, obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer):",
"return obj.rank class JoinedNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='appointed_date', format=None) date = serializers.DateField(source='appointed_date', format='%Y-%m-%d') def",
"firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField() def get_kind(self, obj): return 'FORCE' def get_priority_sort(self,",
"serializers.ListField(child=serializers.CharField()) gender = serializers.CharField(source='gender_display') percentiles = serializers.SerializerMethodField() allegation_count = serializers.IntegerField() percentile_allegation = serializers.DecimalField(",
"return { 'lon': obj.point.x, 'lat': obj.point.y } except AttributeError: return None class OfficerPercentileMobileSerializer(NoNullSerializer):",
"percentile_trr = serializers.DecimalField(source='trr_percentile', max_digits=6, decimal_places=4, allow_null=True) honorable_mention_count = serializers.IntegerField() sustained_count = serializers.IntegerField() unsustained_count",
"get_priority_sort(self, obj): return 20 class VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race = serializers.CharField() age",
"try: return { 'lon': obj.allegation.point.x, 'lat': obj.allegation.point.y } except AttributeError: return None def",
"serializers.SerializerMethodField() date = serializers.SerializerMethodField() taser = serializers.NullBooleanField() firearm_used = serializers.NullBooleanField() point = serializers.SerializerMethodField()",
"serializers.SerializerMethodField() rank = serializers.CharField() race = serializers.CharField() birth_year = serializers.IntegerField() badge = serializers.SerializerMethodField()",
"serializers.IntegerField(source='coaccused_count') attachments = serializers.SerializerMethodField() point = serializers.SerializerMethodField() victims = VictimMobileSerializer(many=True) def get_date_sort(self, obj):",
"'AWARD' def get_priority_sort(self, obj): return 40 class TRRNewTimelineMobileSerializer(BaseTimelineMobileSerializer): trr_id = serializers.IntegerField(source='id') date_sort =",
"def get_kind(self, obj): return 'JOINED' def get_priority_sort(self, obj): return 10 class UnitChangeNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort",
"class AwardNewTimelineMobileSerializer(BaseTimelineMobileSerializer): date_sort = serializers.DateField(source='start_date', format=None) date = serializers.DateField(source='start_date', format='%Y-%m-%d') award_type = serializers.CharField()",
"def get_point(self, obj): try: return { 'lon': obj.point.x, 'lat': obj.point.y } except AttributeError:",
"= serializers.IntegerField() badge = serializers.SerializerMethodField() historic_badges = serializers.ListField(child=serializers.CharField()) gender = serializers.CharField(source='gender_display') percentiles =",
"class VictimMobileSerializer(NoNullSerializer): gender = serializers.CharField(source='gender_display') race = serializers.CharField() age = serializers.IntegerField() class AttachmentFileMobileSerializer(NoNullSerializer):"
] |
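The five timeline serializers all emit a `kind` plus a numeric `priority_sort`, which is what lets their outputs be merged into a single officer timeline and ordered deterministically when several events share a date. Below is a minimal sketch of that ordering contract, using hand-built dicts rather than real Django model instances; the sort key and the 10/20/25/30/40/50 constants come from the serializers above, but the merge step itself is an assumption, since the view code is not part of this file.

import datetime

# Stand-ins for serializer output, not real model instances.
events = [
    {'kind': 'JOINED', 'priority_sort': 10, 'date_sort': datetime.date(2005, 1, 3)},
    {'kind': 'UNIT_CHANGE', 'priority_sort': 20, 'date_sort': datetime.date(2008, 2, 14)},
    {'kind': 'CR', 'priority_sort': 30, 'date_sort': datetime.date(2010, 6, 1)},
    {'kind': 'FORCE', 'priority_sort': 50, 'date_sort': datetime.date(2010, 6, 1)},
]

# Newest first; same-day ties fall back to the kind-specific priority, so a
# use-of-force report (50) outranks a complaint (30) from the same day.
timeline = sorted(events, key=lambda e: (e['date_sort'], e['priority_sort']), reverse=True)
print([e['kind'] for e in timeline])  # ['FORCE', 'CR', 'UNIT_CHANGE', 'JOINED']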
<filename>osmcsclassify/Config.py

historyDbTempDirName = "/home/jason/nn/osm-changeset-classification/osm-planet"
historyDbFileName = "osm-planet/history.sqlite"

#historyPBF = "../osm-data/vermont.osh.pbf"
#historyPBF = 'osm-planet/history-180319.osm.pbf'
historyPBF = "/media/jason/E46AC1AC6AC17BB4/Remillard/maps/osm2017/history-180319.osm.pbf"
changeSetHistoryOSM = "/media/jason/E46AC1AC6AC17BB4/Remillard/maps/osm2017/changesets-180319.osm.bz2"
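Since these are absolute paths onto one specific machine and an external drive, a consumer would typically validate them before starting a long import job. A hypothetical guard follows; it is not part of the source file, and the `osmcsclassify.Config` import path is assumed from the filename marker above.

import os

from osmcsclassify import Config

# Fail fast if the external drive holding the planet extracts is not mounted.
for path in (Config.historyPBF, Config.changeSetHistoryOSM):
    if not os.path.exists(path):
        raise FileNotFoundError('expected OSM extract at %s; is the drive mounted?' % path)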
import numpy as np


def Geweke_Z(X, first=0.1, last=0.5):
    # Geweke diagnostic: compare the means of an early and a late segment
    # of the chain, scaled by their pooled standard deviation.
    N = X.shape[0]
    A = X[:int(first * N)]
    B = X[int(last * N):]
    if np.sqrt(np.var(A) + np.var(B)) == 0:
        Z = None
    else:
        Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A) + np.var(B))
    return Z


def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf, propose_func, xx, param,
                 adaptive_func=None, min_run=1000, max_rum=3000, gap=100):
    # initialization (max_rum caps the number of iterations; name as in the source)
    X_now = propose_func(xx, param)
    L_now = likelihood(X_now, data, param)
    P_now = prior_pdf(X_now) + L_now

    # MCMC running
    accept_num = 0
    L_all = np.zeros(max_rum)
    X_all = np.zeros((max_rum, len(X_now)))
    for m in range(max_rum):
        P_try, L_try = 0.0, 0.0
        Q_now, Q_try = 0.0, 0.0

        # step 1: propose a value
        X_try = propose_func(X_now, param)
        Q_now = propose_pdf(X_now, X_try, param)
        Q_try = propose_pdf(X_try, X_now, param)
        L_try = likelihood(X_try, data, param)
        P_try = prior_pdf(X_try) + L_try

        # step 2: accept or reject. The acceptance-ratio line is missing from
        # the recovered fragments; the standard Metropolis-Hastings ratio in
        # log space is assumed here.
        alpha = min(np.exp((P_try - P_now) + (Q_now - Q_try)), 1)
        if alpha is None:
            print("Warning: accept ratio alpha is none!")
        elif np.random.rand(1) < alpha:
            accept_num += 1
            X_now = X_try + 0.0
            P_now = P_try + 0.0
            L_now = L_try + 0.0
        L_all[m] = L_now
        X_all[m, :] = X_now

        # step 3: convergence diagnostics (note that Geweke_Z can return None
        # for a zero-variance chain, which this check does not guard against)
        if m >= min_run and m % gap == 0:
            z_scores = np.zeros(X_all.shape[1])
            for k in range(X_all.shape[1]):
                z_scores[k] = Geweke_Z(X_all[:m, k])
            if sum(z_scores <= 2) == len(z_scores):
                L_all = L_all[:m]
                X_all = X_all[:m, :]
                break

        # step 4: adaptive MCMC
        if (adaptive_func is not None and accept_num >= 10 and m % gap == 0):
            param = adaptive_func(X_all[:m, :], param)

    print("MCMC summary: %d acceptance in %d run (%.1f%%)." % (accept_num, m, accept_num * 100.0 / m))
    # The return expression is truncated in the fragments; returning the
    # collected samples and log-likelihoods is an assumption.
    return X_all, L_all
"= likelihood(X_try, data, param) P_try = prior_pdf(X_try) + L_try # step 2: accept",
"z_scores = np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores",
"0.0, 0.0 # step 1: propose a value X_try = propose_func(X_now, param) Q_now",
"adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param) L_now = likelihood(X_now,",
"= propose_func(xx, param) L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now) + L_now",
"alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept ratio alpha is",
"m % gap == 0: z_scores = np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]): z_scores[k]",
"accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None:",
"propose_func, xx, param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param)",
"B = X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) == 0: Z = None else:",
"= X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) == 0: Z = None else: Z",
"prior_pdf(X_now) + L_now # MCMC running accept_num = 0 L_all = np.zeros(max_rum) X_all",
"param) Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param) L_try =",
"1 X_now = X_try + 0.0 P_now = P_try + 0.0 L_now =",
"L_try + 0.0 L_all[m] = L_now X_all[m,:] = X_now # step 3. convergence",
"k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores <= 2) == len(z_scores):",
"summary: %d acceptance in %d run (%.1f%%).\" %(accept_num, m, accept_num*100.0/m)) return X_all, L_all,",
"== len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:] break # step 4: adaptive",
"last=0.5): N = X.shape[0] A = X[:int(first*N)] B = X[int(last*N):] if np.sqrt(np.var(A) +",
"m >= min_run and m % gap == 0: z_scores = np.zeros(X_all.shape[1]) for",
"Q_try = propose_pdf(X_try, X_now, param) L_try = likelihood(X_try, data, param) P_try = prior_pdf(X_try)",
"2: accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is",
"X_now = X_try + 0.0 P_now = P_try + 0.0 L_now = L_try",
"import numpy as np def Geweke_Z(X, first=0.1, last=0.5): N = X.shape[0] A =",
"k]) if sum(z_scores <= 2) == len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:]",
"param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in %d run (%.1f%%).\" %(accept_num,",
"numpy as np def Geweke_Z(X, first=0.1, last=0.5): N = X.shape[0] A = X[:int(first*N)]",
"X_try = propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now,",
"+= 1 X_now = X_try + 0.0 P_now = P_try + 0.0 L_now",
"param) P_now = prior_pdf(X_now) + L_now # MCMC running accept_num = 0 L_all",
"X.shape[0] A = X[:int(first*N)] B = X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) == 0:",
"# MCMC running accept_num = 0 L_all = np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now)))",
"np.random.rand(1) < alpha: accept_num += 1 X_now = X_try + 0.0 P_now =",
"def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf, propose_func, xx, param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): #",
"2) == len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:] break # step 4:",
"for m in range(max_rum): P_try, L_try = 0.0, 0.0 Q_now, Q_try = 0.0,",
"+ 0.0 L_all[m] = L_now X_all[m,:] = X_now # step 3. convergence diagnostics",
"0.0 L_all[m] = L_now X_all[m,:] = X_now # step 3. convergence diagnostics if",
"print(\"MCMC summary: %d acceptance in %d run (%.1f%%).\" %(accept_num, m, accept_num*100.0/m)) return X_all,",
"% gap == 0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in",
"<reponame>houruiyan/hilearn<gh_stars>1-10 import numpy as np def Geweke_Z(X, first=0.1, last=0.5): N = X.shape[0] A",
"4: adaptive MCMC if (adaptive_func is not None and accept_num >= 10 and",
"propose_func(xx, param) L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now) + L_now #",
"= adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in %d run (%.1f%%).\" %(accept_num, m,",
"# step 3. convergence diagnostics if m >= min_run and m % gap",
"data, param) P_try = prior_pdf(X_try) + L_try # step 2: accept or reject",
"prior_pdf, likelihood, propose_pdf, propose_func, xx, param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now",
"and m % gap == 0: z_scores = np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]):",
"min_run and m % gap == 0: z_scores = np.zeros(X_all.shape[1]) for k in",
"= abs(A.mean() - B.mean()) / np.sqrt(np.var(A) + np.var(B)) return Z def mcmc_sampler(data, prior_pdf,",
"initialization X_now = propose_func(xx, param) L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now)",
"= X_now # step 3. convergence diagnostics if m >= min_run and m",
"param) L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now) + L_now # MCMC",
"adaptive MCMC if (adaptive_func is not None and accept_num >= 10 and m",
"< alpha: accept_num += 1 X_now = X_try + 0.0 P_now = P_try",
"%d acceptance in %d run (%.1f%%).\" %(accept_num, m, accept_num*100.0/m)) return X_all, L_all, accept_num",
"step 1: propose a value X_try = propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try,",
"= np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try =",
"np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try = 0.0,",
"np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try = 0.0, 0.0 Q_now, Q_try",
"# step 2: accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if",
"= X_all[:m,:] break # step 4: adaptive MCMC if (adaptive_func is not None",
"= propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param)",
"<= 2) == len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:] break # step",
"= 0 L_all = np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum):",
"= 0.0, 0.0 # step 1: propose a value X_try = propose_func(X_now, param)",
"0 L_all = np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try,",
"L_all[m] = L_now X_all[m,:] = X_now # step 3. convergence diagnostics if m",
"and m % gap == 0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d",
"+ L_try # step 2: accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try,",
"propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param) L_try",
"= X.shape[0] A = X[:int(first*N)] B = X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) ==",
"prior_pdf(X_try) + L_try # step 2: accept or reject the proposal alpha =",
"MCMC if (adaptive_func is not None and accept_num >= 10 and m %",
"P_try + 0.0 L_now = L_try + 0.0 L_all[m] = L_now X_all[m,:] =",
"= np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try = 0.0, 0.0 Q_now,",
"reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept",
"for k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores <= 2) ==",
"L_now X_all[m,:] = X_now # step 3. convergence diagnostics if m >= min_run",
"L_try # step 2: accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0))",
"elif np.random.rand(1) < alpha: accept_num += 1 X_now = X_try + 0.0 P_now",
"# step 1: propose a value X_try = propose_func(X_now, param) Q_now = propose_pdf(X_now,",
"range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores <= 2) == len(z_scores): L_all =",
"np def Geweke_Z(X, first=0.1, last=0.5): N = X.shape[0] A = X[:int(first*N)] B =",
"is None: print(\"Warning: accept ratio alpha is none!\") elif np.random.rand(1) < alpha: accept_num",
"L_all = L_all[:m] X_all = X_all[:m,:] break # step 4: adaptive MCMC if",
"0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in %d run (%.1f%%).\"",
"param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param) L_now =",
"value X_try = propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try,",
"adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in %d run (%.1f%%).\" %(accept_num, m, accept_num*100.0/m))",
"data, param) P_now = prior_pdf(X_now) + L_now # MCMC running accept_num = 0",
"accept_num += 1 X_now = X_try + 0.0 P_now = P_try + 0.0",
"= propose_pdf(X_try, X_now, param) L_try = likelihood(X_try, data, param) P_try = prior_pdf(X_try) +",
"range(max_rum): P_try, L_try = 0.0, 0.0 Q_now, Q_try = 0.0, 0.0 # step",
"np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept ratio alpha is none!\") elif",
"and accept_num >= 10 and m % gap == 0): param = adaptive_func(X_all[:m,:],",
"convergence diagnostics if m >= min_run and m % gap == 0: z_scores",
"propose_pdf, propose_func, xx, param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx,",
"sum(z_scores <= 2) == len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:] break #",
"# step 4: adaptive MCMC if (adaptive_func is not None and accept_num >=",
"L_all[:m] X_all = X_all[:m,:] break # step 4: adaptive MCMC if (adaptive_func is",
"m % gap == 0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance",
"m in range(max_rum): P_try, L_try = 0.0, 0.0 Q_now, Q_try = 0.0, 0.0",
"= L_try + 0.0 L_all[m] = L_now X_all[m,:] = X_now # step 3.",
"z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores <= 2) == len(z_scores): L_all = L_all[:m]",
"0.0 P_now = P_try + 0.0 L_now = L_try + 0.0 L_all[m] =",
"none!\") elif np.random.rand(1) < alpha: accept_num += 1 X_now = X_try + 0.0",
"X_now = propose_func(xx, param) L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now) +",
"= prior_pdf(X_try) + L_try # step 2: accept or reject the proposal alpha",
"= np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if sum(z_scores <=",
"the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept ratio",
"= L_now X_all[m,:] = X_now # step 3. convergence diagnostics if m >=",
"= propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param) L_try = likelihood(X_try, data,",
"= P_try + 0.0 L_now = L_try + 0.0 L_all[m] = L_now X_all[m,:]",
"X_now # step 3. convergence diagnostics if m >= min_run and m %",
"= Geweke_Z(X_all[:m, k]) if sum(z_scores <= 2) == len(z_scores): L_all = L_all[:m] X_all",
"X_all = X_all[:m,:] break # step 4: adaptive MCMC if (adaptive_func is not",
"min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param) L_now = likelihood(X_now, data,",
"if m >= min_run and m % gap == 0: z_scores = np.zeros(X_all.shape[1])",
"not None and accept_num >= 10 and m % gap == 0): param",
"param) P_try = prior_pdf(X_try) + L_try # step 2: accept or reject the",
"/ np.sqrt(np.var(A) + np.var(B)) return Z def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf, propose_func, xx,",
"X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try = 0.0, 0.0",
"0)) if alpha is None: print(\"Warning: accept ratio alpha is none!\") elif np.random.rand(1)",
"0.0 L_now = L_try + 0.0 L_all[m] = L_now X_all[m,:] = X_now #",
"== 0: z_scores = np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k])",
"X_now, param) L_try = likelihood(X_try, data, param) P_try = prior_pdf(X_try) + L_try #",
"likelihood(X_try, data, param) P_try = prior_pdf(X_try) + L_try # step 2: accept or",
"L_try = 0.0, 0.0 Q_now, Q_try = 0.0, 0.0 # step 1: propose",
"A = X[:int(first*N)] B = X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) == 0: Z",
"= np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept ratio alpha is none!\")",
"Z = None else: Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A) + np.var(B))",
"xx, param, adaptive_func=None, min_run=1000, max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param) L_now",
"X_try + 0.0 P_now = P_try + 0.0 L_now = L_try + 0.0",
"+ np.var(B)) return Z def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf, propose_func, xx, param, adaptive_func=None,",
"Q_now, Q_try = 0.0, 0.0 # step 1: propose a value X_try =",
"max_rum=3000, gap=100): # initialization X_now = propose_func(xx, param) L_now = likelihood(X_now, data, param)",
"proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning: accept ratio alpha",
"= X_try + 0.0 P_now = P_try + 0.0 L_now = L_try +",
"step 4: adaptive MCMC if (adaptive_func is not None and accept_num >= 10",
">= 10 and m % gap == 0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC",
"L_now # MCMC running accept_num = 0 L_all = np.zeros(max_rum) X_all = np.zeros((max_rum,",
"None and accept_num >= 10 and m % gap == 0): param =",
"(adaptive_func is not None and accept_num >= 10 and m % gap ==",
"ratio alpha is none!\") elif np.random.rand(1) < alpha: accept_num += 1 X_now =",
"or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha is None: print(\"Warning:",
"L_now = likelihood(X_now, data, param) P_now = prior_pdf(X_now) + L_now # MCMC running",
"- B.mean()) / np.sqrt(np.var(A) + np.var(B)) return Z def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf,",
"accept ratio alpha is none!\") elif np.random.rand(1) < alpha: accept_num += 1 X_now",
"propose a value X_try = propose_func(X_now, param) Q_now = propose_pdf(X_now, X_try, param) Q_try",
"np.var(B)) == 0: Z = None else: Z = abs(A.mean() - B.mean()) /",
"return Z def mcmc_sampler(data, prior_pdf, likelihood, propose_pdf, propose_func, xx, param, adaptive_func=None, min_run=1000, max_rum=3000,",
"# initialization X_now = propose_func(xx, param) L_now = likelihood(X_now, data, param) P_now =",
"propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param) L_try = likelihood(X_try, data, param)",
"if (adaptive_func is not None and accept_num >= 10 and m % gap",
"Q_now = propose_pdf(X_now, X_try, param) Q_try = propose_pdf(X_try, X_now, param) L_try = likelihood(X_try,",
"None else: Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A) + np.var(B)) return Z",
"if sum(z_scores <= 2) == len(z_scores): L_all = L_all[:m] X_all = X_all[:m,:] break",
"== 0): param = adaptive_func(X_all[:m,:], param) print(\"MCMC summary: %d acceptance in %d run",
"P_try = prior_pdf(X_try) + L_try # step 2: accept or reject the proposal",
"else: Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A) + np.var(B)) return Z def",
"alpha is None: print(\"Warning: accept ratio alpha is none!\") elif np.random.rand(1) < alpha:",
"0.0 # step 1: propose a value X_try = propose_func(X_now, param) Q_now =",
"L_now = L_try + 0.0 L_all[m] = L_now X_all[m,:] = X_now # step",
"== 0: Z = None else: Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A)",
"P_try, L_try = 0.0, 0.0 Q_now, Q_try = 0.0, 0.0 # step 1:",
"0: z_scores = np.zeros(X_all.shape[1]) for k in range(X_all.shape[1]): z_scores[k] = Geweke_Z(X_all[:m, k]) if",
"= prior_pdf(X_now) + L_now # MCMC running accept_num = 0 L_all = np.zeros(max_rum)",
"alpha: accept_num += 1 X_now = X_try + 0.0 P_now = P_try +",
"P_now = prior_pdf(X_now) + L_now # MCMC running accept_num = 0 L_all =",
"L_all = np.zeros(max_rum) X_all = np.zeros((max_rum, len(X_now))) for m in range(max_rum): P_try, L_try",
"X[:int(first*N)] B = X[int(last*N):] if np.sqrt(np.var(A) + np.var(B)) == 0: Z = None",
"0: Z = None else: Z = abs(A.mean() - B.mean()) / np.sqrt(np.var(A) +",
"step 2: accept or reject the proposal alpha = np.exp(min(P_try+Q_now-P_now-Q_try, 0)) if alpha",
"if np.sqrt(np.var(A) + np.var(B)) == 0: Z = None else: Z = abs(A.mean()",
"X_all[m,:] = X_now # step 3. convergence diagnostics if m >= min_run and",
"break # step 4: adaptive MCMC if (adaptive_func is not None and accept_num",
"+ L_now # MCMC running accept_num = 0 L_all = np.zeros(max_rum) X_all =",
"3. convergence diagnostics if m >= min_run and m % gap == 0:"
] |
[] |
[
"utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.db import",
"model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1 try: domain",
"__future__ import unicode_literals from django.conf import settings from django.db import migrations, models def",
"from django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site",
"settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites',",
"from django.conf import settings from django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate",
"import settings from django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites",
"the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1",
"SITE_ID = 1 try: domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan')",
"import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites',",
"try: domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies",
"-*- from __future__ import unicode_literals from django.conf import settings from django.db import migrations,",
"migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site')",
"apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1 try: domain = settings.DOMAIN except:",
"Site.objects.all().delete() # Register SITE_ID = 1 try: domain = settings.DOMAIN except: domain =",
"unicode_literals from django.conf import settings from django.db import migrations, models def setup_site(apps, schema_editor):",
"# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings",
"import unicode_literals from django.conf import settings from django.db import migrations, models def setup_site(apps,",
"models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete()",
"schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID",
"domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies =",
"settings from django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\"",
"Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1 try: domain =",
"1 try: domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration):",
"except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'),",
"= apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1 try: domain = settings.DOMAIN",
"'Site') Site.objects.all().delete() # Register SITE_ID = 1 try: domain = settings.DOMAIN except: domain",
"Register SITE_ID = 1 try: domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain,",
"Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations = [",
"domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ]",
"django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site =",
"# Register SITE_ID = 1 try: domain = settings.DOMAIN except: domain = 'example.com'",
"sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID = 1 try:",
"def setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() #",
"-*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from",
"\"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register SITE_ID =",
"from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models",
"django.conf import settings from django.db import migrations, models def setup_site(apps, schema_editor): \"\"\"Populate the",
"= 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations",
"'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations =",
"name='ProjMan') class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations = [ migrations.RunPython(setup_site)",
"setup_site(apps, schema_editor): \"\"\"Populate the sites model\"\"\" Site = apps.get_model('sites', 'Site') Site.objects.all().delete() # Register",
"= settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class Migration(migrations.Migration): dependencies = [",
"= 1 try: domain = settings.DOMAIN except: domain = 'example.com' Site.objects.create(domain=domain, name='ProjMan') class",
"coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.db",
"class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations = [ migrations.RunPython(setup_site) ]"
] |
[
"'daemon' attribute always return False def _get_daemon(self): return False def _set_daemon(self, value): pass",
"multiprocessing class GmapsProcess(multiprocessing.Process): # make 'daemon' attribute always return False def _get_daemon(self): return",
"pass daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool #",
"multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter is only a wrapper function,",
"the latter is only a wrapper function, not a proper class. class GmapsProcessPool(multiprocessing.pool.Pool):",
"of multiprocessing.Pool # because the latter is only a wrapper function, not a",
"latter is only a wrapper function, not a proper class. class GmapsProcessPool(multiprocessing.pool.Pool): Process",
"GmapsProcess(multiprocessing.Process): # make 'daemon' attribute always return False def _get_daemon(self): return False def",
"because the latter is only a wrapper function, not a proper class. class",
"_set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of",
"We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter is only a",
"_get_daemon(self): return False def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) # We",
"sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter is only a wrapper",
"value): pass daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool",
"_set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter is",
"# We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter is only",
"attribute always return False def _get_daemon(self): return False def _set_daemon(self, value): pass daemon",
"import multiprocessing class GmapsProcess(multiprocessing.Process): # make 'daemon' attribute always return False def _get_daemon(self):",
"= property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the",
"def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead",
"multiprocessing.Pool # because the latter is only a wrapper function, not a proper",
"# because the latter is only a wrapper function, not a proper class.",
"make 'daemon' attribute always return False def _get_daemon(self): return False def _set_daemon(self, value):",
"False def _get_daemon(self): return False def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon)",
"instead of multiprocessing.Pool # because the latter is only a wrapper function, not",
"only a wrapper function, not a proper class. class GmapsProcessPool(multiprocessing.pool.Pool): Process = GmapsProcess",
"property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because the latter",
"False def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool",
"daemon = property(_get_daemon, _set_daemon) # We sub-class multiprocessing.pool.Pool instead of multiprocessing.Pool # because",
"def _get_daemon(self): return False def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) #",
"is only a wrapper function, not a proper class. class GmapsProcessPool(multiprocessing.pool.Pool): Process =",
"always return False def _get_daemon(self): return False def _set_daemon(self, value): pass daemon =",
"# make 'daemon' attribute always return False def _get_daemon(self): return False def _set_daemon(self,",
"return False def _get_daemon(self): return False def _set_daemon(self, value): pass daemon = property(_get_daemon,",
"class GmapsProcess(multiprocessing.Process): # make 'daemon' attribute always return False def _get_daemon(self): return False",
"return False def _set_daemon(self, value): pass daemon = property(_get_daemon, _set_daemon) # We sub-class"
] |
[
"ITEM = typing.TypeVar('ITEM') FIELD = str VALUE = typing.TypeVar('VALUE') GETTER = typing.Callable[[ITEM, FIELD],",
"import typing ITEM = typing.TypeVar('ITEM') FIELD = str VALUE = typing.TypeVar('VALUE') GETTER =",
"typing ITEM = typing.TypeVar('ITEM') FIELD = str VALUE = typing.TypeVar('VALUE') GETTER = typing.Callable[[ITEM,",
"= typing.TypeVar('ITEM') FIELD = str VALUE = typing.TypeVar('VALUE') GETTER = typing.Callable[[ITEM, FIELD], VALUE]",
"<gh_stars>1-10 import typing ITEM = typing.TypeVar('ITEM') FIELD = str VALUE = typing.TypeVar('VALUE') GETTER"
] |
[
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,",
"MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res = b'' while True:",
"class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host =",
"ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res == b'\\r\\n':",
"IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED",
"\"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \"",
"a browser. swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain() self._counter += 1",
"await swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\")",
"swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\")",
"#print('lines', lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in",
"@see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby",
"switch PIN ON and switch PIN OFF. # This prevents showing the cashed",
"server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host,",
"= await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async def run_client(self,",
"this software and associated documentation files (the \"Software\"), to deal in the Software",
"OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE",
"if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self,",
"SOFTWARE. \"\"\" import re import sys import io import utime import gc import",
"asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break '''",
"+ \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") #",
"re import sys import io import utime import gc import uasyncio as asyncio",
"== b'\\r\\n\\r\\n': # end of HTTP request break ''' # 150-250 ms request",
"\"$\", path) if match: return route return None ''' def find_route(self, request): \"\"\"",
"License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge,",
"# end of HTTP request break ''' # 150-250 ms request = await",
"not be stored in any cache. # This is necessary to execute the",
"print('raise OSError') raise OSError if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try:",
"route return None ''' def find_route(self, request): \"\"\" Find route \"\"\" lines =",
"in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory",
"action \"\"\" await self.send(swriter, \"404 Not found\", status=404) async def internal_error(self, swriter, error):",
"OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING",
"self._host = host self._port = port self._routes = [] self._on_request_handler = None self.backlog",
"CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR",
"print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog)",
"error action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if",
"\" + str_error, status=500) def on_request(self, handler): \"\"\" Set request handler \"\"\" self._on_request_handler",
"the Software without restriction, including without limitation the rights to use, copy, modify,",
"await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break",
"person obtaining a copy of this software and associated documentation files (the \"Software\"),",
"for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver",
"the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies",
"release. async def start(self): \"\"\" Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host,",
"= output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" + str_error,",
"import io import utime import gc import uasyncio as asyncio class aMicroPyServer(object): def",
"without restriction, including without limitation the rights to use, copy, modify, merge, publish,",
"merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit",
"\"\"\" self._host = host self._port = port self._routes = [] self._on_request_handler = None",
"> 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method !=",
"route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test internal_error except Exception as e:",
"return None ''' def find_route(self, request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\")",
"swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") #",
"find_route(self, request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in",
"if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]:",
"\"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def send(self, swriter, response, status=200,",
"''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res if",
"= await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res == b'\\r\\n': # end",
"code on the server: # switch PIN ON and switch PIN OFF. #",
"= port self._routes = [] self._on_request_handler = None self.backlog = backlog self.timeout =",
"in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED",
">>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find route \"\"\"",
"ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:] ==",
"sublicense, and/or sell copies of the Software, and to permit persons to whom",
"this permission notice shall be included in all copies or substantial portions of",
"Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client,",
"modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to",
"= re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method != route[\"method\"]: continue path",
"''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if",
"path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route else: match =",
"ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT",
"route return None ''' async def not_found(self, swriter): \"\"\" Not found action \"\"\"",
"as e: print(2, e) self.internal_error(swriter, e) raise except OSError as e: print(3, e)",
"request) else: await self.not_found(swriter) #1/0 # test internal_error except Exception as e: print(2,",
"await self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self, handler): \"\"\" Set request",
"as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\"",
"# This is necessary to execute the code on the server: # switch",
"route[\"path\"] + \"$\", path) if match: return route return None ''' async def",
"WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT",
"async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from client', self,",
"LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF",
"route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def send(self, swriter, response,",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A",
"swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find route \"\"\" method =",
"''' def find_route(self, request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines)",
"route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method !=",
"await self.send(swriter, \"404 Not found\", status=404) async def internal_error(self, swriter, error): \"\"\" Catch",
"\"\"\" await self.send(swriter, \"404 Not found\", status=404) async def internal_error(self, swriter, error): \"\"\"",
"if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]:",
"notice and this permission notice shall be included in all copies or substantial",
"backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port = port self._routes =",
"await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find route \"\"\" method",
"path == route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"] + \"$\",",
"if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test internal_error except",
"TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE",
"Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge, to",
"OSError if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request)",
"in any cache. # This is necessary to execute the code on the",
"print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter,",
"the cashed text # when a user presses the \"Backward/Forward\" button in a",
"\"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The",
"b'\\r\\n\\r\\n': # end of HTTP request break ''' # 150-250 ms request =",
"USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re import sys import",
"charge, to any person obtaining a copy of this software and associated documentation",
"if path == route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"] +",
"MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be",
"PIN ON and switch PIN OFF. # This prevents showing the cashed text",
"= re.search(\"^\" + route[\"path\"] + \"$\", path) if match: return route return None",
"KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"found action \"\"\" await self.send(swriter, \"404 Not found\", status=404) async def internal_error(self, swriter,",
"+= res if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break '''",
"if match: return route return None ''' def find_route(self, request): \"\"\" Find route",
"switch PIN OFF. # This prevents showing the cashed text # when a",
"def find_route(self, request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route",
"error): \"\"\" Catch error action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error =",
"e: print(2, e) self.internal_error(swriter, e) raise except OSError as e: print(3, e) pass",
"request break ''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res =",
"request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await",
"The response may not be stored in any cache. # This is necessary",
"response may not be stored in any cache. # This is necessary to",
"\"handler\": handler, \"method\": method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]):",
"print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing",
"request break ''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res =",
"path, handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\":",
"BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION",
"150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:]",
"2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge, to any person",
"persons to whom the Software is furnished to do so, subject to the",
"status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\" if swriter is",
"\"$\", path) if match: return route return None ''' async def not_found(self, swriter):",
"+ \"$\", path) if match: return route return None ''' async def not_found(self,",
"time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...')",
"text # when a user presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\")",
"Software is furnished to do so, subject to the following conditions: The above",
"IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY",
"raise except OSError as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed()",
"{200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server",
"def internal_error(self, swriter, error): \"\"\" Catch error action \"\"\" output = io.StringIO() sys.print_exception(error,",
"+ \"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain() for header in extra_headers:",
"= str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await route[\"handler\"](swriter,",
"path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route else: match =",
"= {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal",
"raise Exception(\"Can't send response, no connection instance\") status_message = {200: \"OK\", 400: \"Bad",
"NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND",
"#print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' %",
"client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res = b'' while",
"output) str_error = output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \"",
"to deal in the Software without restriction, including without limitation the rights to",
"await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:] == b'\\r\\n\\r\\n': # end",
"path, \"handler\": handler, \"method\": method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\",",
"lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1)",
"internal_error(self, swriter, error): \"\"\" Catch error action \"\"\" output = io.StringIO() sys.print_exception(error, output)",
"<gh_stars>0 \"\"\" MicroPyServer is a simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver",
"request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' # 150-250 ms",
"asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from client',",
"self.backlog) while True: await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms()",
"to whom the Software is furnished to do so, subject to the following",
"documentation files (the \"Software\"), to deal in the Software without restriction, including without",
"request += res if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break",
"socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(),",
"= await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request",
"files (the \"Software\"), to deal in the Software without restriction, including without limitation",
"Software without restriction, including without limitation the rights to use, copy, modify, merge,",
"400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0",
"start(self): \"\"\" Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server =",
"server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\"",
"status_message = {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500:",
"to do so, subject to the following conditions: The above copyright notice and",
"print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler, method=\"GET\"):",
"ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:]",
"await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test internal_error except Exception as",
"swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms'",
"instance\") status_message = {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\",",
"True: try: ''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request +=",
"in the Software without restriction, including without limitation the rights to use, copy,",
"if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' except asyncio.TimeoutError",
"server: # switch PIN ON and switch PIN OFF. # This prevents showing",
"is None: raise Exception(\"Can't send response, no connection instance\") status_message = {200: \"OK\",",
"add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler,",
"\"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def",
"the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,",
"to any person obtaining a copy of this software and associated documentation files",
"AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN",
"HTTP request break ''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res",
"res == b'\\r\\n': # end of HTTP request break ''' # 150-250 ms",
"method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method != route[\"method\"]: continue",
"Not found\", status=404) async def internal_error(self, swriter, error): \"\"\" Catch error action \"\"\"",
"response, no connection instance\") status_message = {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\",",
"swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) +",
"no-store\\r\\n\") # The response may not be stored in any cache. # This",
"# end of HTTP request break ''' except asyncio.TimeoutError as e: print(1, e,",
"print(res) request += res if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request",
"if res == b'': # socket connection broken print('raise OSError') raise OSError if",
"route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test internal_error except Exception",
"a copy of this software and associated documentation files (the \"Software\"), to deal",
"Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS",
"\"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) +",
"sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername'])",
"OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN",
"action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno()",
"+ \\ content_type + \"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header +",
"from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res = b''",
"a user presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end of",
"try: route = self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0",
"1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def",
"== route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"] + \"$\", path)",
"WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF",
"output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno() != -1:",
"host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port = port",
"= self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test",
"print('Server is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route \"\"\"",
"await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may",
"the server: # switch PIN ON and switch PIN OFF. # This prevents",
"free of charge, to any person obtaining a copy of this software and",
"and this permission notice shall be included in all copies or substantial portions",
"and to permit persons to whom the Software is furnished to do so,",
"# Remove it in the production release. async def start(self): \"\"\" Start server",
"await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain()",
"is necessary to execute the code on the server: # switch PIN ON",
"\"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By:",
"def not_found(self, swriter): \"\"\" Not found action \"\"\" await self.send(swriter, \"404 Not found\",",
"+ \"$\", path) if match: return route return None ''' def find_route(self, request):",
"rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of",
"internal_error except Exception as e: print(2, e) self.internal_error(swriter, e) raise except OSError as",
"send response, no connection instance\") status_message = {200: \"OK\", 400: \"Bad Request\", 403:",
"it in the production release. async def start(self): \"\"\" Start server \"\"\" print('Awaiting",
"!= -1: await self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self, handler): \"\"\"",
"processing request.\") def find_route(self, request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1)",
"% utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed()",
"except OSError as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client",
"HTTP request break ''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res)",
"res == b'': # socket connection broken print('raise OSError') raise OSError if request:",
"for route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1)",
"EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES",
"and switch PIN OFF. # This prevents showing the cashed text # when",
"utime import gc import uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80,",
"\"\"\" Send response to client \"\"\" if swriter is None: raise Exception(\"Can't send",
"b'' while True: try: ''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout)",
"self.timeout) print(res) request += res if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP",
"may not be stored in any cache. # This is necessary to execute",
"\"asyncio.TimeoutError\", self.timeout) res = b'' if res == b'': # socket connection broken",
"swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not",
"# switch PIN ON and switch PIN OFF. # This prevents showing the",
"associated documentation files (the \"Software\"), to deal in the Software without restriction, including",
"\"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"}",
"BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE",
"aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host",
"SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,",
"== b'\\r\\n\\r\\n': # end of HTTP request break ''' except asyncio.TimeoutError as e:",
"\"method\": method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send",
"\"\"\" Constructor \"\"\" self._host = host self._port = port self._routes = [] self._on_request_handler",
"notice shall be included in all copies or substantial portions of the Software.",
"def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port",
"swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self, handler):",
"output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" + str_error, status=500) def",
"except Exception as e: print(2, e) self.internal_error(swriter, e) raise except OSError as e:",
"= timeout self._counter = 0 # Remove it in the production release. async",
"await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': #",
"swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def",
"necessary to execute the code on the server: # switch PIN ON and",
"https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge, to any person obtaining a",
"not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be stored in",
"str_error = output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" +",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT",
"copy of this software and associated documentation files (the \"Software\"), to deal in",
"on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await",
"b'\\r\\n': # end of HTTP request break ''' # 150-250 ms res =",
"end of HTTP request break ''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024),",
"try: ''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res",
"substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY",
"#print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find route",
"None ''' def find_route(self, request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines',",
"ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION",
"= swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render",
"self._routes = [] self._on_request_handler = None self.backlog = backlog self.timeout = timeout self._counter",
"of HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf",
"res if res == b'\\r\\n': # end of HTTP request break ''' #",
"required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be stored in any",
"break ''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request +=",
"obtaining a copy of this software and associated documentation files (the \"Software\"), to",
"[] self._on_request_handler = None self.backlog = backlog self.timeout = timeout self._counter = 0",
"\"\"\" MicroPyServer is a simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The",
"swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\" if",
"Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \" + status_message[status] + \"\\r\\n\"",
"self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self, handler): \"\"\" Set request handler",
"TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN",
"route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if",
"route else: match = re.search(\"^\" + route[\"path\"] + \"$\", path) if match: return",
"self.timeout) request += res if res == b'\\r\\n': # end of HTTP request",
"+ status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain() for header",
"0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method != route[\"method\"]:",
"Exception(\"Can't send response, no connection instance\") status_message = {200: \"OK\", 400: \"Bad Request\",",
"host self._port = port self._routes = [] self._on_request_handler = None self.backlog = backlog",
"swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished",
"OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR",
"re.search(\"^\" + route[\"path\"] + \"$\", path) if match: return route return None '''",
"to execute the code on the server: # switch PIN ON and switch",
"\" + str(status) + \" \" + status_message[status] + \"\\r\\n\" + \\ content_type",
"troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge, to any person obtaining",
"IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR",
"end of HTTP request break ''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\",",
"OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"This is necessary to execute the code on the server: # switch PIN",
"publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons",
"server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum.",
"def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from client', self, sreader,",
"try: request = b'' res = b'' while True: try: ''' # 450-550",
"= b'' if res == b'': # socket connection broken print('raise OSError') raise",
"including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,",
"Request\", 403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" +",
"utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b''",
"or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT",
"import re import sys import io import utime import gc import uasyncio as",
"self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' except",
"b'' if res == b'': # socket connection broken print('raise OSError') raise OSError",
"b'': # socket connection broken print('raise OSError') raise OSError if request: request =",
"print(2, e) self.internal_error(swriter, e) raise except OSError as e: print(3, e) pass swriter_s_fileno",
"as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.',",
"except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res",
"str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await route[\"handler\"](swriter, request)",
"THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import",
"button in a browser. swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain() self._counter",
"all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS",
"SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR",
"request = b'' res = b'' while True: try: ''' # 450-550 ms",
"route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method =",
"swriter): self.start_time = utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try:",
"lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes:",
"500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \" + status_message[status]",
"Exception as e: print(2, e) self.internal_error(swriter, e) raise except OSError as e: print(3,",
"return route else: match = re.search(\"^\" + route[\"path\"] + \"$\", path) if match:",
"import gc import uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5,",
"asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host",
"True: await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection",
"cashed text # when a user presses the \"Backward/Forward\" button in a browser.",
"swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self,",
"path) if match: return route return None ''' async def not_found(self, swriter): \"\"\"",
"on the server: # switch PIN ON and switch PIN OFF. # This",
"OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES",
"of HTTP request break ''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout)",
"broken print('raise OSError') raise OSError if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request))",
"content_type + \"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\") ###",
"io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter,",
"res = b'' if res == b'': # socket connection broken print('raise OSError')",
"execute the code on the server: # switch PIN ON and switch PIN",
"method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method != route[\"method\"]: continue",
"\"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await route[\"handler\"](swriter, request) else:",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the",
"await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async def run_client(self, sreader,",
"e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res == b'': # socket connection",
"OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re import sys import io",
"COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN",
"ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await",
"{}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1)",
"None: raise Exception(\"Can't send response, no connection instance\") status_message = {200: \"OK\", 400:",
"self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async def",
"self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is",
"ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE",
"self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async",
"OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL",
"= await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:] == b'\\r\\n\\r\\n': #",
"\"Error: \" + str_error, status=500) def on_request(self, handler): \"\"\" Set request handler \"\"\"",
"above copyright notice and this permission notice shall be included in all copies",
"sys import io import utime import gc import uasyncio as asyncio class aMicroPyServer(object):",
"MicroPyServer is a simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT",
"continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route else: match",
"\"\"\" if swriter is None: raise Exception(\"Can't send response, no connection instance\") status_message",
"self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add",
"WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE",
"res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res == b'\\r\\n': #",
"This prevents showing the cashed text # when a user presses the \"Backward/Forward\"",
"150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if",
"self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time",
"self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of",
"### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response",
"# socket connection broken print('raise OSError') raise OSError if request: request = str(request,",
"swriter is None: raise Exception(\"Can't send response, no connection instance\") status_message = {200:",
"PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS",
"permission notice shall be included in all copies or substantial portions of the",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS",
"content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\" if swriter is None:",
"projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is",
"def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\":",
"\"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain()",
"response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\" if swriter",
"OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH",
"await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from",
"def find_route(self, request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if",
"b'' res = b'' while True: try: ''' # 450-550 ms res =",
"async def internal_error(self, swriter, error): \"\"\" Catch error action \"\"\" output = io.StringIO()",
"the following conditions: The above copyright notice and this permission notice shall be",
"await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i",
"#print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\",",
"swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time:",
"e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res == b'': #",
"async def start(self): \"\"\" Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port))",
"text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\" if swriter is None: raise",
"= b'' res = b'' while True: try: ''' # 450-550 ms res",
"timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port = port self._routes = []",
"request).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"]",
"in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path",
"\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT",
"swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------')",
"furnished to do so, subject to the following conditions: The above copyright notice",
"new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def send(self, swriter,",
"\"\"\" Not found action \"\"\" await self.send(swriter, \"404 Not found\", status=404) async def",
"450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res ==",
"= [] self._on_request_handler = None self.backlog = backlog self.timeout = timeout self._counter =",
"permit persons to whom the Software is furnished to do so, subject to",
"OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re import sys import io import",
"any person obtaining a copy of this software and associated documentation files (the",
"\"\"\" Catch error action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue()",
"else: await self.not_found(swriter) #1/0 # test internal_error except Exception as e: print(2, e)",
"copies of the Software, and to permit persons to whom the Software is",
"swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\" Find",
"send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client \"\"\"",
"CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.",
"included in all copies or substantial portions of the Software. THE SOFTWARE IS",
"''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res",
"route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"] + \"$\", path) if",
"Catch error action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close()",
"swriter): \"\"\" Not found action \"\"\" await self.send(swriter, \"404 Not found\", status=404) async",
"copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and",
"swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be stored in any cache. #",
"THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"Not found action \"\"\" await self.send(swriter, \"404 Not found\", status=404) async def internal_error(self,",
"# test internal_error except Exception as e: print(2, e) self.internal_error(swriter, e) raise except",
"404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \"",
"if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if",
"presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end of HTTP header",
"end of HTTP request break ''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(),",
"OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re import",
"the Software, and to permit persons to whom the Software is furnished to",
"if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' # 150-250",
"+ str_error, status=500) def on_request(self, handler): \"\"\" Set request handler \"\"\" self._on_request_handler =",
"HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN",
"\"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\",",
"= re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\"",
"use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,",
"self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing",
"\"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \" + status_message[status] +",
"break ''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b''",
"while True: try: ''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request",
"following conditions: The above copyright notice and this permission notice shall be included",
"Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def send(self,",
"OSError') raise OSError if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route",
"%i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close()",
"!= route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route",
"''' async def not_found(self, swriter): \"\"\" Not found action \"\"\" await self.send(swriter, \"404",
"self.internal_error(swriter, e) raise except OSError as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno()",
"request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes:",
"cache. # This is necessary to execute the code on the server: #",
"print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res == b'': # socket",
"\"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \"",
"copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\",",
"# not required, vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be stored",
"NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR",
"self.send(swriter, \"404 Not found\", status=404) async def internal_error(self, swriter, error): \"\"\" Catch error",
"match: return route return None ''' async def not_found(self, swriter): \"\"\" Not found",
"The above copyright notice and this permission notice shall be included in all",
"+ route[\"path\"] + \"$\", path) if match: return route return None ''' def",
"!= route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route",
"production release. async def start(self): \"\"\" Start server \"\"\" print('Awaiting client connection on",
"# 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout)",
"connection instance\") status_message = {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404: \"Not",
"for header in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not",
"sreader.e['peername']) try: request = b'' res = b'' while True: try: ''' #",
"raise OSError if request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route =",
"HTTP request break ''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res",
"\"Software\"), to deal in the Software without restriction, including without limitation the rights",
"deal in the Software without restriction, including without limitation the rights to use,",
"prevents showing the cashed text # when a user presses the \"Backward/Forward\" button",
"\"\"\" import re import sys import io import utime import gc import uasyncio",
"granted, free of charge, to any person obtaining a copy of this software",
"method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return",
"limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"= 0 # Remove it in the production release. async def start(self): \"\"\"",
"if res == b'\\r\\n': # end of HTTP request break ''' # 150-250",
"self._on_request_handler = None self.backlog = backlog self.timeout = timeout self._counter = 0 #",
"asyncio.wait_for(sreader.readline(), self.timeout) request += res if res == b'\\r\\n': # end of HTTP",
"SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re",
"sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error:",
"path) if match: return route return None ''' def find_route(self, request): \"\"\" Find",
"route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route else:",
"AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE",
"Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method",
"vainglory swriter.write(\"Cache-Control: no-store\\r\\n\") # The response may not be stored in any cache.",
"ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF",
"of this software and associated documentation files (the \"Software\"), to deal in the",
"'\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request): \"\"\"",
"WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO",
"\"404 Not found\", status=404) async def internal_error(self, swriter, error): \"\"\" Catch error action",
"# end of HTTP request break ''' # 150-250 ms res = await",
"swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername'])",
"asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True: await asyncio.sleep(1) async def run_client(self, sreader, swriter):",
"sell copies of the Software, and to permit persons to whom the Software",
"client \"\"\" if swriter is None: raise Exception(\"Can't send response, no connection instance\")",
"if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if",
"= await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n':",
"the production release. async def start(self): \"\"\" Start server \"\"\" print('Awaiting client connection",
"output.getvalue() output.close() if swriter.s.fileno() != -1: await self.send(swriter, \"Error: \" + str_error, status=500)",
"port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port = port self._routes",
"async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self,",
"break ''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await",
"+ route[\"path\"] + \"$\", path) if match: return route return None ''' async",
">>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter)",
"extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required, vainglory swriter.write(\"Cache-Control:",
"OSError as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket",
"do so, subject to the following conditions: The above copyright notice and this",
"# 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if",
"response to client \"\"\" if swriter is None: raise Exception(\"Can't send response, no",
"MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of",
"self._port, self.backlog) while True: await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time =",
"self.not_found(swriter) #1/0 # test internal_error except Exception as e: print(2, e) self.internal_error(swriter, e)",
"status=404) async def internal_error(self, swriter, error): \"\"\" Catch error action \"\"\" output =",
"str(status) + \" \" + status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\")",
"gc import uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20):",
"lines[0]).group(1) for route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\",",
"async def not_found(self, swriter): \"\"\" Not found action \"\"\" await self.send(swriter, \"404 Not",
"is furnished to do so, subject to the following conditions: The above copyright",
"in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path",
"Constructor \"\"\" self._host = host self._port = port self._routes = [] self._on_request_handler =",
"\"\"\" Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server = await",
"return route return None ''' async def not_found(self, swriter): \"\"\" Not found action",
"self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type:",
"OFF. # This prevents showing the cashed text # when a user presses",
"self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path ==",
"await self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new",
"\"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header",
"self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route",
"so, subject to the following conditions: The above copyright notice and this permission",
"return None ''' async def not_found(self, swriter): \"\"\" Not found action \"\"\" await",
"== b'\\r\\n': # end of HTTP request break ''' # 150-250 ms res",
"when a user presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end",
"header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await",
"a simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright",
"the code on the server: # switch PIN ON and switch PIN OFF.",
"run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(),",
"FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR",
"self._counter = 0 # Remove it in the production release. async def start(self):",
"in the production release. async def start(self): \"\"\" Start server \"\"\" print('Awaiting client",
"INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR",
"def start(self): \"\"\" Start server \"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server",
"# The response may not be stored in any cache. # This is",
"await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res == b'\\r\\n': # end of",
"of the Software, and to permit persons to whom the Software is furnished",
"= None self.backlog = backlog self.timeout = timeout self._counter = 0 # Remove",
"re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\" +",
"and/or sell copies of the Software, and to permit persons to whom the",
"res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:] == b'\\r\\n\\r\\n':",
"# when a user presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") #",
"be stored in any cache. # This is necessary to execute the code",
"e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self,",
"of charge, to any person obtaining a copy of this software and associated",
"(the \"Software\"), to deal in the Software without restriction, including without limitation the",
"IN THE SOFTWARE. \"\"\" import re import sys import io import utime import",
"None self.backlog = backlog self.timeout = timeout self._counter = 0 # Remove it",
"= backlog self.timeout = timeout self._counter = 0 # Remove it in the",
"copyright notice and this permission notice shall be included in all copies or",
"re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method != route[\"method\"]: continue path =",
"#print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await",
"request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route",
"of HTTP request break ''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout)",
"else: match = re.search(\"^\" + route[\"path\"] + \"$\", path) if match: return route",
"Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method",
"timeout self._counter = 0 # Remove it in the production release. async def",
"to permit persons to whom the Software is furnished to do so, subject",
"handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method})",
"+= res if res == b'\\r\\n': # end of HTTP request break '''",
"sreader.s.fileno(), sreader.e['peername']) try: request = b'' res = b'' while True: try: '''",
"\" + status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain() for",
"import uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\"",
"str_error, status=500) def on_request(self, handler): \"\"\" Set request handler \"\"\" self._on_request_handler = handler",
"is closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\":",
"Send response to client \"\"\" if swriter is None: raise Exception(\"Can't send response,",
"conditions: The above copyright notice and this permission notice shall be included in",
"asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request += res if request[-4:] == b'\\r\\n\\r\\n': # end of",
"# This prevents showing the cashed text # when a user presses the",
"request).group(1) for route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\",",
"THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO",
"route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if",
"client connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while",
"OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to client",
"self.backlog = backlog self.timeout = timeout self._counter = 0 # Remove it in",
"self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path ==",
"in a browser. swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain() self._counter +=",
"be included in all copies or substantial portions of the Software. THE SOFTWARE",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"\"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0:",
"re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method != route[\"method\"]: continue path =",
"request break ''' # 150-250 ms res = await asyncio.wait_for(sreader.read(1024), self.timeout) print(res) request",
"stored in any cache. # This is necessary to execute the code on",
"whom the Software is furnished to do so, subject to the following conditions:",
"+ \"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\") ### await",
"header in extra_headers: swriter.write(header + \"\\r\\n\") ### await swriter.write(\"X-Powered-By: MicroPyServer\\r\\n\") # not required,",
"socket connection broken print('raise OSError') raise OSError if request: request = str(request, \"utf8\")",
"found\", status=404) async def internal_error(self, swriter, error): \"\"\" Catch error action \"\"\" output",
"403: \"Forbidden\", 404: \"Not Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status)",
"\\ content_type + \"\\r\\n\") await swriter.drain() for header in extra_headers: swriter.write(header + \"\\r\\n\")",
"swriter.write(\"HTTP/1.0 \" + str(status) + \" \" + status_message[status] + \"\\r\\n\" + \\",
"await self.not_found(swriter) #1/0 # test internal_error except Exception as e: print(2, e) self.internal_error(swriter,",
"WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\"",
"e) pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno,",
"FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR",
"utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server",
"test internal_error except Exception as e: print(2, e) self.internal_error(swriter, e) raise except OSError",
"= request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for",
"pass swriter_s_fileno = swriter.s.fileno() await swriter.wait_closed() #print('Client socket closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(),",
"-1: await self.send(swriter, \"Error: \" + str_error, status=500) def on_request(self, handler): \"\"\" Set",
"self.start_time = utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request",
"uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor",
"+ \" \" + status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\") await",
"request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0]) >",
"portions of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF",
"THE SOFTWARE. \"\"\" import re import sys import io import utime import gc",
"res = b'' while True: try: ''' # 450-550 ms res = await",
"extra_headers=[]): \"\"\" Send response to client \"\"\" if swriter is None: raise Exception(\"Can't",
"0 # Remove it in the production release. async def start(self): \"\"\" Start",
"status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain() for header in",
"\"\"\" output = io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno() !=",
"request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] ==",
"method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return",
"= io.StringIO() sys.print_exception(error, output) str_error = output.getvalue() output.close() if swriter.s.fileno() != -1: await",
"simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c)",
"of HTTP request break ''' except asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout)",
"asyncio.TimeoutError as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res ==",
"find_route(self, request): \"\"\" Find route \"\"\" lines = request.split(\"\\r\\n\") #print('lines', lines) if len(lines[0])",
"while True: await asyncio.sleep(1) async def run_client(self, sreader, swriter): self.start_time = utime.ticks_ms() #print('Got",
"DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,",
"request += res if res == b'\\r\\n': # end of HTTP request break",
"distribute, sublicense, and/or sell copies of the Software, and to permit persons to",
"of the Software. THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY",
"res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end of HTTP",
"software and associated documentation files (the \"Software\"), to deal in the Software without",
"\"\"\" print('Awaiting client connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port,",
"self.timeout) res = b'' if res == b'': # socket connection broken print('raise",
"self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect()",
"\"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method != route[\"method\"]:",
"asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024), self.timeout) if res[-4:] == b'\\r\\n\\r\\n': # end",
"THE USE OR OTHER DEALINGS IN THE SOFTWARE. \"\"\" import re import sys",
"if match: return route return None ''' async def not_found(self, swriter): \"\"\" Not",
"HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf))",
"b'\\r\\n\\r\\n': # end of HTTP request break ''' except asyncio.TimeoutError as e: print(1,",
"handler, \"method\": method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\"",
"lines[0]).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\" + route[\"path\"]",
"swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async",
"def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self, path,",
"shall be included in all copies or substantial portions of the Software. THE",
"method}) async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response",
"res if request[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' #",
"user presses the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end of HTTP",
"+ '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\") def find_route(self, request):",
"match = re.search(\"^\" + route[\"path\"] + \"$\", path) if match: return route return",
"+ str(status) + \" \" + status_message[status] + \"\\r\\n\" + \\ content_type +",
"self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 # test internal_error",
"NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,",
"LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.",
"not_found(self, swriter): \"\"\" Not found action \"\"\" await self.send(swriter, \"404 Not found\", status=404)",
"IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"connection on {}:{}'.format(self._host, self._port)) self.server = await asyncio.start_server(self.run_client, self._host, self._port, self.backlog) while True:",
"PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT",
"Found\", 500: \"Internal Server Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \" +",
"the Software is furnished to do so, subject to the following conditions: The",
"+= 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response) #print(\"swriter.out_buf >>>{}<<<\".format(swriter.out_buf)) await swriter.drain() #print(\"Finished processing request.\")",
"# end of HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n')",
"import sys import io import utime import gc import uasyncio as asyncio class",
"#1/0 # test internal_error except Exception as e: print(2, e) self.internal_error(swriter, e) raise",
"IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING",
"request.\") def find_route(self, request): \"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for",
"showing the cashed text # when a user presses the \"Backward/Forward\" button in",
"A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT",
"= utime.ticks_ms() #print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request =",
"connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res =",
"route = self.find_route(request) if route: await route[\"handler\"](swriter, request) else: await self.not_found(swriter) #1/0 #",
"is a simple HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License",
"EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS",
"any cache. # This is necessary to execute the code on the server:",
"= re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\"",
"route[\"path\"] + \"$\", path) if match: return route return None ''' def find_route(self,",
"#print('Got connection from client', self, sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res",
"The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free",
"subject to the following conditions: The above copyright notice and this permission notice",
"PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE",
"close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def add_route(self, path, handler,",
"ON and switch PIN OFF. # This prevents showing the cashed text #",
"return route return None ''' def find_route(self, request): \"\"\" Find route \"\"\" lines",
"gc.collect() #print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.')",
"method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path, \"handler\": handler, \"method\": method}) async",
"e) raise except OSError as e: print(3, e) pass swriter_s_fileno = swriter.s.fileno() await",
"closed.', self, swriter, swriter_s_fileno, swriter.s.fileno(), swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time))",
"DEALINGS IN THE SOFTWARE. \"\"\" import re import sys import io import utime",
"\"\"\" Find route \"\"\" method = re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if",
"is hereby granted, free of charge, to any person obtaining a copy of",
"\" \" + status_message[status] + \"\\r\\n\" + \\ content_type + \"\\r\\n\") await swriter.drain()",
"port self._routes = [] self._on_request_handler = None self.backlog = backlog self.timeout = timeout",
"and associated documentation files (the \"Software\"), to deal in the Software without restriction,",
"FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,",
"without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or",
"= host self._port = port self._routes = [] self._on_request_handler = None self.backlog =",
"Remove it in the production release. async def start(self): \"\"\" Start server \"\"\"",
"HTTP server for MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019",
"as e: print(1, e, \"asyncio.TimeoutError\", self.timeout) res = b'' if res == b'':",
"PIN OFF. # This prevents showing the cashed text # when a user",
"continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1) if path == route[\"path\"]: return route else: match",
"= re.search(\"^([A-Z]+)\", request).group(1) for route in self._routes: if method != route[\"method\"]: continue path",
"swriter, error): \"\"\" Catch error action \"\"\" output = io.StringIO() sys.print_exception(error, output) str_error",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER",
"connection broken print('raise OSError') raise OSError if request: request = str(request, \"utf8\") #print('request",
"Error\"} swriter.write(\"HTTP/1.0 \" + str(status) + \" \" + status_message[status] + \"\\r\\n\" +",
"#print('---------------------------------------------------------------') async def close(self): print('Closing server...') self.server.close() await self.server.wait_closed() print('Server is closed.') def",
"len(lines[0]) > 0: method = re.search(\"^([A-Z]+)\", lines[0]).group(1) for route in self._routes: if method",
"__init__(self, host=\"0.0.0.0\", port=80, backlog=5, timeout=20): \"\"\" Constructor \"\"\" self._host = host self._port =",
"None ''' async def not_found(self, swriter): \"\"\" Not found action \"\"\" await self.send(swriter,",
"request: request = str(request, \"utf8\") #print('request >>>{}<<<'.format(request)) try: route = self.find_route(request) if route:",
"hereby granted, free of charge, to any person obtaining a copy of this",
"CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE",
"if swriter is None: raise Exception(\"Can't send response, no connection instance\") status_message =",
"swriter.e['peername']) print('Render time: %i ms' % utime.ticks_diff(utime.ticks_ms(), self.start_time)) gc.collect() #print('---------------------------------------------------------------') async def close(self):",
"to client \"\"\" if swriter is None: raise Exception(\"Can't send response, no connection",
"no connection instance\") status_message = {200: \"OK\", 400: \"Bad Request\", 403: \"Forbidden\", 404:",
"self._port = port self._routes = [] self._on_request_handler = None self.backlog = backlog self.timeout",
"browser. swriter.write(\"\\r\\n\") # end of HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter)",
"res[-4:] == b'\\r\\n\\r\\n': # end of HTTP request break ''' except asyncio.TimeoutError as",
"end of HTTP header await swriter.drain() self._counter += 1 swriter.write(str(self._counter) + '\\r\\n') swriter.write(response)",
"https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted,",
"restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,",
"self.timeout = timeout self._counter = 0 # Remove it in the production release.",
"re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route else: match = re.search(\"^\" +",
"OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR",
"backlog self.timeout = timeout self._counter = 0 # Remove it in the production",
"to the following conditions: The above copyright notice and this permission notice shall",
"for route in self._routes: if method != route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", request).group(1)",
"# 450-550 ms res = await asyncio.wait_for(sreader.readline(), self.timeout) request += res if res",
"OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS",
"(c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission is hereby granted, free of charge, to any",
"Software, and to permit persons to whom the Software is furnished to do",
"io import utime import gc import uasyncio as asyncio class aMicroPyServer(object): def __init__(self,",
"= b'' while True: try: ''' # 450-550 ms res = await asyncio.wait_for(sreader.readline(),",
"''' # 150-250 ms request = await asyncio.wait_for(sreader.readline(), self.timeout) res = await asyncio.wait_for(sreader.read(1024),",
"closed.') def add_route(self, path, handler, method=\"GET\"): \"\"\" Add new route \"\"\" self._routes.append({\"path\": path,",
"route[\"method\"]: continue path = re.search(\"^[A-Z]+\\\\s+(/[-a-zA-Z0-9_.]*)\", lines[0]).group(1) if path == route[\"path\"]: return route else:",
"the \"Backward/Forward\" button in a browser. swriter.write(\"\\r\\n\") # end of HTTP header await",
"async def send(self, swriter, response, status=200, content_type=\"Content-Type: text/plain\", extra_headers=[]): \"\"\" Send response to",
"== b'': # socket connection broken print('raise OSError') raise OSError if request: request",
"match: return route return None ''' def find_route(self, request): \"\"\" Find route \"\"\"",
"MicroPython projects. @see https://github.com/troublegum/micropyserver The MIT License Copyright (c) 2019 troublegum. https://github.com/troublegum/micropyserver Permission",
"import utime import gc import uasyncio as asyncio class aMicroPyServer(object): def __init__(self, host=\"0.0.0.0\",",
"e) self.internal_error(swriter, e) raise except OSError as e: print(3, e) pass swriter_s_fileno =",
"sreader, sreader.s.fileno(), sreader.e['peername']) try: request = b'' res = b'' while True: try:"
]
[
"없으면 탐색 종료 break else: # 항상 오른쪽 노드가 큰 값임 no =",
"prev_no != 0xffffffff and next_no != 0xffffffff: # 양쪽 모두 노트가 존재함 #",
"엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list =",
"buf[t_off + 4:] if pps_next is not None: t_off = off + 0x48",
"node=0, prefix=''): if node == 0: pps_name = '' name = prefix +",
"추가하기 t_num = len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한",
"# 링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for i in",
"self.mm = self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록 연결 next_b =",
"Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수 추가하여 링크를",
"0x3f)) och.append(ch) ret_str = '' for ch in och: ret_str += struct.pack('<H', ch)",
"# --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보 info = dict() #",
"= list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820')",
"SDB 적용하기 return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. #",
"'1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass",
"__modify_big_block_link(self, old_link, add_num): if add_num < 0: return [] # 전체 BBD 링크를",
"Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if",
"return True else: return False # --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def",
"size = num * self.ssize # 추가해야 할 용량 add_big_num = (size /",
"마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off =",
"!= 0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b] if next_b == 0xfffffffe:",
"data + ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기",
"ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1), 'encrypt': (val & 0x2",
"raise error('Not Support : BBD -> BBD (Inc)') n = (len(data) / self.bsize)",
"list 블록에 들어갈 수 있는 Big Block 개수 for no in special_no: seg",
"reset_stream=False): for p in self.__full_list: if p['Name'] == name: no = p['Node'] break",
"+= struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\",
"담을 수 있음 b_num = (add_num / (self.bsize/4)) + (1 if (add_num %",
"# --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None",
"if idx <= 109: return 0x4c + (idx * 4) else: t_idx =",
"name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] =",
"no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4]",
"BBD (Inc)') n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize)",
"name): # ----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self,",
"수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd # 체크",
"--------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf",
"* 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크",
"+ 1) * self.bsize if len(data) == self.bsize: self.mm = self.mm[:off] + data",
"import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0'",
"BBD에는 bsize / 4 개수만큼 Big Block을 담을 수 있음 b_num = (add_num",
"+ 1)] else: for n in list_array: div_n = self.parent.bsize / self.parent.ssize off",
"\\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석 처리 if self.verbose:",
"--------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''):",
"del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가",
"filename in self.handle: # 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else:",
"여부 (True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id",
"no % bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array,",
"- len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) #",
"+= get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i in range(len(self.sbd) / 4):",
"필요한 개수로 링크 줄이기 # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link)",
"p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기 # pps에 ListView.2의",
"에러 return -1 if idx <= 109: return 0x4c + (idx * 4)",
"start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. #",
"self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL:",
"# 실제 필요한 데이터 블록 self.mm += attach_data else: special_no = [] #",
"0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return",
"= {} mm = filehandle # OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1':",
"n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list",
"'%2d %-23s %d %8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'],",
"self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print",
"* b_num for i in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합",
"self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size',",
"i, no in enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link",
"링크에 필요한 블록 수 추가하여 링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0,",
"else: # 0x3800 - 0x383F # the value contains two characters ch -=",
"이름 # 리턴값 : 압축 해제된 내용 or None # --------------------------------------------------------------------- def unarc(self,",
"== '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize",
"= verbose return 0 # 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self)",
"수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link",
"* self.read_size data += self.parent.mm[off:off + self.read_size * (e - s + 1)]",
"p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158",
"((bsize / 4) - 1)) + (1 if (t_idx % ((bsize / 4)",
"'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block)",
"사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 if org_size >= len(data):",
"self.bsize:] # XBBD 생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4)",
"('\\x00' * ((n * self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기",
"is False: # 유효한 Tree가 아니면 다음 continue t = '' t +=",
"% (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s",
"or pps['Next'] == node or pps['Dir'] == node: return i def __get_max_node(self, node):",
"존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에",
"n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root",
"링크 수집하기 t_num = 0 if (len(t_link) * self.ssize) < len(t_data): # 블록",
"data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:] # XBBD",
"2)) for ch in wch: if 0x3800 <= ch <= 0x4840: if ch",
"self.mm = self.mm[:off] + t_data[i * self.bsize:(i + 1) * self.bsize] + self.mm[off",
"print ' ' + '%2d %-23s %d %8X %8X %8X %8X %8d' %",
"리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename in",
"\\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t,",
"4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] # SBD가 나누어 bsize",
"self.bsize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할 필요",
"= self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD 처리하기",
"for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를",
"'\\x00' * 0x80 + buf[off+0x80:] if size is not None: t_off = off",
"SBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 # raise error('Not",
"작업 완료 n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize)",
"0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False #",
"kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b =",
"# PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0]",
"블록에 들어갈 수 있는 Big Block 개수 for no in special_no: seg =",
"# 플러그인 엔진의 주요 정보 info = dict() # 사전형 변수 선언 info['author']",
"range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch: if 0x3800",
"size=len(data)) else: # raise error('Not Support : BBD -> BBD (Inc)') n =",
"arclist(self, filename, fileformat): file_scan_list = [] # 검사 대상 정보를 모두 가짐 #",
"num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for i",
"if fsize > rsize: fileformat = { # 포맷 정보를 담을 공간 'Attached_Pos':",
"덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no,",
"if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp =",
"arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- #",
"리턴값 : 압축 해제된 내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name,",
"ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name']",
"p['Type'], t, p['Size']) # PPS 전체 경로 구하기 self.__deep = 0 self.__full_list =",
"* self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i + 1) * self.ssize]",
"PPS 전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret",
"self.mm = self.mm[:off] + t_data[i * self.ssize:(i + 1) * self.ssize] + self.mm[off",
"self.ssize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) *",
"range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2',",
"self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수 추가하여 링크를 새롭게 생성 #",
"= None self.bbd_fat = {} self.sbd = None self.root = None self.pps =",
"fp.read() except: data = None return data # --------------------------------------------------------------------- # arcclose(self) # 압축",
"self.__valid_pps_tree() is False: return False if self.verbose: print kavutil.vprint('Property Storage') ''' print '",
"enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise",
"i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다.",
"sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80)",
"4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t",
"# 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록",
"= self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no,",
"i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return",
"Error('Not Ole signature') # big block, small bloc 크기 구하기 self.bsize = 1",
"완료 n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize) else",
"SBD link # add_num : 추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self,",
"self.read_size = 0 self.fat = None # print self.parent.verbose # 연속된 숫자 값을",
"영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no,",
"(e - s + 1)] else: for n in list_array: div_n = self.parent.bsize",
"압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = [] #",
"'1.1' # 버전 info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole'",
"root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None,",
"* self.bsize if len(data) == self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:]",
"zfile = self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기",
"self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return True return False # ---------------------------------------------------------------------",
"위한 헤더 수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm",
"영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else:",
"o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True)",
"pics.read() d = d + d o.write_stream('FileHeader', d) o.close() ''' ''' # case1",
"0 if (len(t_link) * self.ssize) < len(t_data): # 블록 추가해야 하나? t_size =",
"if prev_no != 0xffffffff and next_no != 0xffffffff: # 양쪽 모두 노트가 존재함",
"--------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def",
"sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array):",
"return False self.bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4)",
"'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD",
"/ 4)) + (1 if (t_num % ((self.bsize - 4) / 4)) else",
"+ bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에",
"' + ('-' * 74) for p in self.pps: print ' ' +",
"in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운",
"return -1 t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b",
"OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read() d = d +",
"BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = '' for i in t_link: self.bbd",
"/ 4)) + (1 if (add_num % (self.bsize / 4)) else 0) if",
"self.mm = mm self.pps = pps self.bsize = bsize self.ssize = ssize self.bbd",
"unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id == 'arc_ole': o =",
"next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize + (off *",
"# 기존에는 BBD 사용 if org_size >= len(data): # raise error('Not Support :",
"구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num)",
"'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'],",
"= '' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b =",
"name is invalid.') # print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd,",
"얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks",
"self.__full_list = [] self.parse() # OLE 파일을 분석 def close(self): if self.isfile: self.fp.close()",
"'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False #",
": 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보",
"0x4c + (idx * 4) else: t_idx = idx - 109 seg =",
"0) t_data = data + ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를",
"return no def delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no =",
"# --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname)",
"fname_in_arc - 압축 해제할 파일 이름 # 리턴값 : 압축 해제된 내용 or",
"self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기",
"뒤쪽에 추가하기 t_num = len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) #",
"data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link",
"'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block, small bloc 크기 구하기 self.bsize",
"node): # 해당 정보를 가진 root를 찾기 for i, pps in enumerate(self.pps): if",
"b_data = '' # add_data = '' add_num = num - n #",
"만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data),",
"해제할 파일 이름 # 리턴값 : 압축 해제된 내용 or None # ---------------------------------------------------------------------",
"블록 수 추가하기 # BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link =",
"수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : BBD -> BBD (Inc)')",
"return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block,",
"p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name'])",
"add_data = ('\\x00' * self.bsize * add_num) # 추가해야 할 BBD list 개수는",
"--------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다. # 입력값 :",
"1)] else: for n in list_array: div_n = self.parent.bsize / self.parent.ssize off =",
"링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n",
"= self.mm[:off] + data + self.mm[off + self.bsize:] if __name__ == '__main__': #",
"get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) '''",
"def init(self, buf): # OLE 주요 데이터 self.mm = buf self.bsize = 0",
"= get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list 블록에 들어갈 수 있는",
"if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir'])",
"= -1 if no == -1: raise Error('PPS name is invalid.') # print",
"'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print ' ' + ('-'",
"True else: return False # --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self,",
"ret = [] for p in self.__full_list: if p['Type'] == 2 and streams:",
"i * 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif",
"self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) #",
"& 0x1 == 0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext': (val &",
"num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d'",
"# bbd list 개수가 109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block for",
"self.bbd for no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd)",
"data + self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks ==",
"if (len(data) % self.ssize) else 0) t_data = data + ('\\x00' * ((n*self.ssize)",
"free_link = [i for i, no in enumerate(sbd_link) if (no == 0xffffffff and",
"= [] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는",
"else 0) if old_b_num == b_num: break else: old_b_num = b_num total_bbd_num =",
"verbose return 0 # 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) #",
"' %-2s %-32s %4s %-4s %-4s %-4s %8s %8s' % ('No', 'Name', 'Type',",
"get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해 준비한다 if len(data) >= 0x1000:",
"* self.bsize:(i + 1) * self.bsize] off = (no + 1) * self.bsize",
"num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size =",
"if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start',",
"+ data + sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no * 4] +",
"struct import types import kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수",
"+ (off * 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def",
"== 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1",
"== 2 and streams: ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name']) else:",
"전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node ==",
"* self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False",
"# --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node",
"= (t_num / ((self.bsize - 4) / 4)) + (1 if (t_num %",
"파일 이름 # 리턴값 : {파일 포맷 분석 정보} or None # ---------------------------------------------------------------------",
"# Big Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수",
"kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None,",
"old_link, add_num): if add_num < 0: return [] # 전체 BBD 링크를 구한다",
"((bsize / 4) - 1)) else 0) off = (t_idx % ((bsize /",
"0 이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료",
"Big Block 추가 요청 # t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의",
"= buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44)",
"추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num = 0 if",
"+ struct.pack('<L', size) + buf[t_off + 4:] if start is not None: t_off",
"kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize =",
"t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) *",
"# CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff:",
"if self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # ---------------------------------------------------------------------",
"struct.pack('<L', size) + buf[t_off + 4:] if start is not None: t_off =",
"Blocks') print self.small_block return True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장)",
"n >= num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만",
"self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is False: return False if self.verbose:",
"잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm",
"= '' # add_data = '' add_num = num - n # 추가해야",
"# print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제",
"이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array",
"%8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print '",
"self.bsize:(i + 1) * self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정",
"%4s %-4s %-4s %-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next', '",
">= 0x1000: # BBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용",
"== num_list.pop(0): break end = e break else: for i in range(len(num_list)): num_list.pop(0)",
"2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch: if 0x3800 <= ch",
"o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정",
"else: raise Error('Input data is invalid.') # 수정 모드 self.write_mode = write_mode #",
"t_link): for i, n in enumerate(t_link): off = (self.small_block[n / 8] + 1)",
"self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = '' if size",
"kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4]",
"파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = [] # 검사",
"# 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.bsize) t_num =",
"== 0xffffffff and next_no != 0xffffffff: # Next만 존재 # 1. next 노드",
"cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is False:",
"리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return",
"bsize) * bsize if fsize > rsize: fileformat = { # 포맷 정보를",
"/ self.bsize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할",
"= kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize + (off * 4)",
"o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read() d =",
"rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as",
"수정된 data를 쓰기 위해 준비한다 if len(data) >= 0x1000: # BBD를 사용한다. if",
"+ ('\\x00' * ((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에",
"최적화 필요함 def get_liner_value(self, num_list): start = None end = None if not",
"# --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # BBD를 배열로",
"수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link 추가",
"self.init(buf) def init(self, buf): # OLE 주요 데이터 self.mm = buf self.bsize =",
"= None self.bbd_list_array = None self.bbd = None self.bbd_fat = {} self.sbd =",
"((t_no + 1) * self.bsize) + (off * 4) self.mm = self.mm[:t_off] +",
"print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- #",
"개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link = get_block_link(r_no, self.bbd) #",
"self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True",
"num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in",
"'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat # HWP 인가?",
"+ 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기",
"bsize-4) return (next_b + 1) * bsize + (off * 4) # ---------------------------------------------------------------------",
"# 기존에는 SBD 사용 if org_size >= len(data): # raise error('Not Support :",
"# 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for p in",
"+ data + self.mm[off + self.bsize:] if __name__ == '__main__': # import zlib",
"데이터 self.mm = buf self.bsize = 0 self.ssize = 0 # 임시 변수",
"= 'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize",
"self.ssize) + (1 if (len(data) % self.ssize) else 0) t_data = data +",
"sb = pps['Start'] size = pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize",
"bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks',",
"# import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True,",
"def __set_bblock(self, no, data): off = (no + 1) * self.bsize if len(data)",
"self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if t: self.init(t)",
"쓰기 위해 준비한다 if len(data) >= 0x1000: # BBD를 사용한다. if org_size >=",
"prev 노드 값을 root로 보낸다. t_no = prev_no elif prev_no == 0xffffffff and",
"'Next', ' Dir', 'SB', 'Size') print ' ' + ('-' * 74) for",
"in special_no: seg = no / bb_num off = no % bb_num #",
"# 양쪽 노드가 존재하는가? if prev_no != 0xffffffff and next_no != 0xffffffff: #",
"* 74) for p in self.pps: if p['Valid'] is False: # 유효한 Tree가",
"# ----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent = parent self.node =",
"# 1. prev 노드 값을 root로 보낸다. t_no = prev_no # 2. prev",
"name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] !=",
"단위가 아니면 맞춘다. n = len(sbd) % self.bsize if n: t = self.bsize",
"= 0 if (len(t_link) * self.bsize) < len(t_data): # 블록 추가해야 하나? t_size",
"링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: #",
"if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next'])",
"no = -1 if no == -1: raise Error('PPS name is invalid.') #",
"size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라 데이터",
"# Next만 존재 # 1. next 노드 값을 root로 보낸다. t_no = next_no",
"self.write_mode = write_mode # OLE 주요 데이터 self.mm = None self.bsize = None",
"XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list 개수가",
"for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b",
"하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no)",
"self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no == 0xffffffff: # Prev만",
"None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret = {} mm =",
"and len(self.pps[x]['Name']) == 0: continue except IndexError: if (x & 0x90900000) == 0x90900000:",
"0xffffffff # root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no,",
"read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if size >=",
"중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp",
"'' for ch in och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace')",
"bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 <<",
"False: return False if self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s",
"or False) # 리턴값 : 0 - 성공, 0 이외의 값 - 실패",
"filename) # 압축 파일의 핸들을 얻는다. # 입력값 : filename - 파일 이름",
"loop = False for x in num_list: if e + 1 == x:",
"t_link[i+1] data = struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4] + data",
"num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data",
"# --------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된 SBD 이미지 # ---------------------------------------------------------------------",
"None self.init(buf) def init(self, buf): # OLE 주요 데이터 self.mm = buf self.bsize",
"for i, no in enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link =",
"+ self.bsize:] # XBBD 생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' *",
"for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize)",
"off*4) # print hex(t) # BBD List에 BBD 등록하기 for i, no in",
">= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat",
"self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return True return False #",
"링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for i in t[1:]:",
"적용하기 return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd",
"= (self.parent.small_block[n / div_n] + 1) * self.parent.bsize off += (n % div_n)",
"& 0x4 == 0x4)} except Error: pass o.close() return ret # --------------------------------------------------------------------- #",
"Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not",
"1) * self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small",
"name = prefix + '/' + pps_name # print (\"%02d : %d %s\")",
"--------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return 0 # 플러그인 엔진 종료",
"만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는",
"'\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i != (x_num-1): x_data += struct.pack('<L', last_no+1)",
"of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기",
"def __write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link): off = (self.small_block[n /",
"* size) # 모든 데이터를 0으로 Wipe t = ow.delete(no) if t: self.init(t)",
"하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) >",
"* self.bsize self.mm = self.mm[:off] + data + self.mm[off + self.bsize:] if __name__",
"링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big block",
"# 특정 노드의 Max 값을 가진 node를 찾기 no = node while True:",
"try: self.__get_pps_path() except IndexError: pass # small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'],",
"1) - last_no if n >= num: # 잔여 개수가 추가하려는 개수보다 많거나",
"parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') #",
"만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data),",
"self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb,",
"ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기 # ---------------------------------------------------------------------",
"start) + buf[t_off + 4:] if pps_prev is not None: t_off = off",
"'' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir']",
"off == 0x180: buf = buf[:off] + '\\x00' * 0x80 elif del_info: buf",
"/ 4 개수만큼 Big Block을 담을 수 있음 b_num = (add_num / (self.bsize/4))",
"self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라 데이터 쓰기 (내장)",
"양쪽 노드가 존재하는가? if prev_no != 0xffffffff and next_no != 0xffffffff: # 양쪽",
"= 'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후",
"됨) # old_link : 기존 SBD link # add_num : 추가 SBD link",
"- 압축 가능 엔진 ID # arc_name - 최종적으로 압축될 압축 파일 이름",
"for i, pps in enumerate(self.pps): if pps['Prev'] == node or pps['Next'] == node",
"추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num <",
": 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle:",
"쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link):",
"print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 <",
"self.root_list_array = None self.exploit = [] # 취약점 존재 여부 # 임시 변수",
"% ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print ' ' +",
"off = (t_idx % ((bsize / 4) - 1)) next_b = xbbd_start_block for",
"= [] if len(self.pps) == 0: # 분석된 PPS가 없으면 종료 return False",
"(len(t_link) * self.ssize) t_num = (t_size / self.ssize) + (1 if (t_size %",
"len(t_data) - (len(t_link) * self.ssize) t_num = (t_size / self.ssize) + (1 if",
"if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data =",
"사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize",
"ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: #",
"얻는다. # 입력값 : filename - 파일 이름 # 리턴값 : 압축 파일",
"엔진의 위치 # verbose - 디버그 모드 (True or False) # 리턴값 :",
"if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증 if",
"0x00 and ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2] else: name =",
"bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off",
"리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트 vlist = list()",
"d + d o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o = OleFile('normal.hwp',",
"벗어나면 에러 return -1 if idx <= 109: return 0x4c + (idx *",
"self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. #",
"0x80) if del_info and off == 0x180: buf = buf[:off] + '\\x00' *",
"이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no =",
"get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b = no if next_b",
"num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다",
"t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = ''",
"0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # ---------------------------------------------------------------------",
"0x4 == 0x4)} except Error: pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self,",
"출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1])",
"# OLE 파일을 분석 def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm)",
"seg*4) t_off = ((t_no + 1) * self.bsize) + (off * 4) self.mm",
"* self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data)",
"arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) # ,",
"and ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2]",
"os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp = open(input_data, 'rb') buf =",
"%-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체",
"xbbd_start_block for i in range(seg): if next_b == 0xfffffffe: return -1 t_buf =",
"i, pps in enumerate(self.pps): if pps['Prev'] == node or pps['Next'] == node or",
"(add_num / (self.bsize / 4)) + (1 if (add_num % (self.bsize / 4))",
"self.bbd_fat = {} self.sbd = None self.root = None self.pps = None self.small_block",
"'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next']",
"블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다.",
"추가해야 할 BBD list 개수는 한개의 BBD에는 bsize / 4 개수만큼 Big Block을",
"= kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = ''",
"# print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off =",
"self.bbd = bbd # 체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for",
"__init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile = False #",
"이전 SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전 링크",
"플러그인 엔진 초기화 self.handle = {} self.verbose = verbose return 0 # 플러그인",
"새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: # 유효한",
"self.mm = buf self.bsize = 0 self.ssize = 0 # 임시 변수 self.__deep",
"파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf",
"Big Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수 #",
"data = None return data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을",
"self.bsize) * self.bsize # 파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는",
"data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num)",
"갱신 self.bbd_fat = {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd,",
"= self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no != 0xffffffff and next_no !=",
"뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link,",
"변수 self.__deep = 0 self.__full_list = [] self.parse() # OLE 파일을 분석 def",
"/ self.ssize) + (1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한",
"sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i in range(len(self.sbd)",
"--------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link): off = (self.small_block[n",
"'Dir', 'SB', 'Size') print ' ' + ('-' * 74) for p in",
"error('Not Support : SBD -> BBD') # 섹터가 변화는 것은 Dec, Inc가 의미",
"t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전",
"# t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함 t_link =",
"p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s %4s %-4s",
"= kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가",
"- 압축 파일 # fname_in_arc - 압축 해제할 파일 이름 # 리턴값 :",
"# --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False,",
"get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize]",
"# print sbd_list_array for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off =",
"SBD (Inc)') # 작업 완료 n = (len(data) / self.ssize) + (1 if",
"수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 # raise error('Not Support :",
"수정 됨) # old_link : 기존 BBD link # add_num : 추가 BBD",
"((bsize / 4) - 1)) next_b = xbbd_start_block for i in range(seg): if",
"생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize",
"= 0 # 임시 변수 self.__deep = 0 self.__full_list = [] self.parse() #",
"' % p['Next'] t += ' - ' if p['Dir'] == 0xffffffff else",
"= self.bsize - n sbd += '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd)",
"= None self.small_block = None self.root_list_array = None self.exploit = [] # 취약점",
"Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no,",
"off + 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:]",
"no == -1: raise Error('PPS name is invalid.') # print no ow =",
"# 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드",
"data = '' if size >= 0x1000: t_list = list(list_array) while len(t_list): s,",
"add_big_num = (size / self.bsize) + (1 if (size % self.bsize) else 0)",
"return False # --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self,",
"1 and storages: ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가?",
"* self.bsize # t_data의 위치 self.mm = self.mm[:off] + t_data + self.mm[off +",
"self.verbose = verbose return 0 # 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- #",
"# --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link): off =",
"한꺼번에 파일로 읽기 off = (s + 1) * self.read_size data += self.parent.mm[off:off",
"t_data의 위치 self.mm = self.mm[:off] + t_data + self.mm[off + self.bsize:] # XBBD",
"처리 if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4)",
"pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data): # 기존 PPS 정보를",
"수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat = {} for",
"self.mm[off + self.bsize:] # XBBD 생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff'",
"PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/'",
"--------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True,",
"fileformat): file_scan_list = [] # 검사 대상 정보를 모두 가짐 # 미리 분석된",
"BBD -> BBD (Inc)') n = (len(data) / self.bsize) + (1 if (len(data)",
"kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks:",
"self.parent.bsize off += (n % div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size]",
"# format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다. # 입력값 : filehandle",
"Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock =",
"range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {}",
"[] try: self.__get_pps_path() except IndexError: pass # small block link 얻기 self.small_block =",
"BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd # 체크 !!!",
"= no if next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b]",
"self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0",
"return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self,",
"self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = ''",
"부족함. 따라서 Root를 늘려야 함 size = num * self.ssize # 추가해야 할",
"t_num = (t_size / self.ssize) + (1 if (t_size % self.ssize) else 0)",
"엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num']",
"self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축",
"--------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c)",
"p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s %4s %-4s %-4s %-4s %8s",
"self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n] + 1) * self.parent.bsize off",
"수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd for",
"else: # SBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 #",
"- 109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4)) + (1",
"Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block =",
"t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기",
"raise Error('PPS name(%s) is invalid.' % name) # self.init(self.mm) # return ow =",
"OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat",
"% self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200 != 0",
"노드 # 1. 0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff # root",
"if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block, small bloc",
"' ' + '%2d %-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'], t,",
"True except IOError: pass return False # --------------------------------------------------------------------- # OleFile 클래스 # ---------------------------------------------------------------------",
"수정된 SDB 적용하기 return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다.",
"kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도 추가될 수",
"else: # raise error('Not Support : BBD -> BBD (Inc)') n = (len(data)",
"i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i in range(len(self.bbd)",
"4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf",
"= bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block =",
"전체 BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no",
"0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir']",
"0x40: # 이상 파일 정보 처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks,",
"big block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link):",
"* ((self.bsize/4) - 1) if i != (x_num-1): x_data += struct.pack('<L', last_no+1) #",
"hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1)",
"only one charecter can be decoded ch = MsiBase64Encode(ch - 0x4800) if not",
"kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps,",
"sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i for i,",
"처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _",
"t_link = [] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4))",
"오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더",
"val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1), 'encrypt': (val",
"+ sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' +",
"self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가",
"fat = self.bbd else: # read_size = self.ssize fat = self.sbd # org_list_array",
"add_num += x_num b_num = (add_num / (self.bsize / 4)) + (1 if",
"= prefix + pps_name else: if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함",
"% self.bsize if n: t = self.bsize - n sbd += '\\xff' *",
"# 플러그인 엔진을 초기화 한다. # 인력값 : plugins_path - 플러그인 엔진의 위치",
"if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if idx <=",
"* self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb,",
"filename, filename_ex): ret = {} mm = filehandle # OLE 헤더와 동일 if",
"추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num",
"몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD 링크를 처음",
"입력값 : arc_engine_id - 압축 가능 엔진 ID # arc_name - 최종적으로 압축될",
"= '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception):",
"읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat)",
"i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0,",
"노드가 존재하는가? if prev_no != 0xffffffff and next_no != 0xffffffff: # 양쪽 모두",
"in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return [] # --------------------------------------------------------------------- #",
"and next_no != 0xffffffff: # 양쪽 모두 노트가 존재함 # 1. prev 노드",
"= True if len(f) == 0: # 정상적인 PPS가 없음 return False while",
"= pps['Start'] size = pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize self.fat",
"# 수정 모드 self.write_mode = write_mode # OLE 주요 데이터 self.mm = None",
"t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd for no",
"# 입력값 : arc_engine_id - 압축 엔진 ID # arc_name - 압축 파일",
"buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:] if start is",
"'rb') as fp: buf = fp.read() # print '[-] filename :', rname, len(buf)",
"위치에 1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off",
"# OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize = 1",
"get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize",
"self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no,",
"<= 0x4840: if ch >= 0x4800: # 0x4800 - 0x483F # only one",
"start = num_list.pop(0) e = start loop = False for x in num_list:",
"0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir']",
"OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t =",
"else: old_b_num = b_num total_bbd_num = old_num_bbd + b_num # 전체 BBD list",
"self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을",
"var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- #",
"s, e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off",
"self.bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd +=",
"BBD 사용 # raise error('Not Support : BBD -> SBD') # 섹터가 변화는",
"kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: #",
"플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를",
"구하기 self.__deep = 0 self.__full_list = [] try: self.__get_pps_path() except IndexError: pass #",
"root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif",
"노드의 Max 값을 가진 node를 찾기 no = node while True: pps =",
"< r_size / self.ssize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return",
"return i def __get_max_node(self, node): # 특정 노드의 Max 값을 가진 node를 찾기",
"self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd,",
"''' print ' %-2s %-32s %4s %-4s %-4s %-4s %8s %8s' % ('No',",
"get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크",
"0x483F # only one charecter can be decoded ch = MsiBase64Encode(ch - 0x4800)",
"0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose:",
"* self.ssize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link)",
"Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name,",
"enumerate(bbd_link) if (no == 0xffffffff and i < size / self.bsize)] if len(free_link)",
"BBD에 링크 연결하기 for i in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L',",
"self.bbd_fat = {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i",
"self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지 확인",
"= t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1] bbd",
"fat = self.sbd # org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기",
"if org_size >= 0x1000: # 기존에는 BBD 사용 # raise error('Not Support :",
"4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd)",
"pps_next is not None: t_off = off + 0x48 buf = buf[:t_off] +",
"self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid",
"buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next is",
"존재 # 1. next 노드 값을 root로 보낸다. t_no = next_no else: #",
"self.bbd_list_array = None self.bbd = None self.bbd_fat = {} self.sbd = None self.root",
"<< kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None,",
"Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps =",
"= [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는",
"= 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node:",
"old_b_num == b_num: break else: old_b_num = b_num total_bbd_num = old_num_bbd + b_num",
"self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big Block",
"off = no % bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no",
"if (t_num % ((self.bsize - 4) / 4)) else 0) x_num = total_xbbd_num",
"Stream: def __init__(self, parent, node): self.parent = parent self.node = node self.read_size =",
"모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # BBD 배열을",
"------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화 한다.",
"i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] =",
"= prefix + '/' + pps_name # print (\"%02d : %d %s\") %",
"o.sbd) # d2 = pics.read() o.close() ''' # XBBD 늘어나는 경우 # o",
"ch: continue else: # 0x3800 - 0x383F # the value contains two characters",
"''' # ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- #",
"t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Small block 영역에",
"num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48)",
"in enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num]",
"name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return [] # ---------------------------------------------------------------------",
"따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n",
"= '\\xff' * self.bsize * b_num for i in range(b_num): bbd_no.append(last_no) last_no +=",
"노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next']",
"get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in",
"self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if t: self.init(t) # 새롭게",
": 추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num",
"필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을",
"= zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive()",
"__write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link): off = (self.small_block[n / 8]",
"% p['Prev'] t += ' - ' if p['Next'] == 0xffffffff else '%4d",
"# 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD 링크 t_link",
"수정한다. # bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd",
"위해 처리 f = [] if len(self.pps) == 0: # 분석된 PPS가 없으면",
"self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -=",
"Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link = get_block_link(r_no,",
"= kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) #",
"CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28',",
"추가하기 # SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num)",
"0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 <<",
"KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인",
"self.mm[0x30:] last_no += 1 # XBBD 처리하기 if total_bbd_num > 109: t_num =",
"= ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return",
"kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: #",
"# 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기",
"self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = '' if size >=",
"+= ' - ' if p['Dir'] == 0xffffffff else '%4d ' % p['Dir']",
"return True except IOError: pass return False # --------------------------------------------------------------------- # OleFile 클래스 #",
"old_b_num = b_num total_bbd_num = old_num_bbd + b_num # 전체 BBD list 개수",
"== '__main__': # import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o =",
"선언 info['author'] = '<NAME>' # 제작자 info['version'] = '1.1' # 버전 info['title'] =",
"n: t = self.bsize - n sbd += '\\xff' * t if self.verbose:",
"self.bsize) off = ((node % 4) * 0x80) if del_info and off ==",
"SBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 # num_link :",
"enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] #",
"def __get_handle(self, filename): if filename in self.handle: # 이전에 열린 핸들이 존재하는가? zfile",
"가능한 악성코드 리스트 vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는",
"읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root",
"설정 크기 # start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None,",
"(t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된",
"get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로",
"prev_no == 0xffffffff and next_no != 0xffffffff: # Next만 존재 # 1. next",
"# --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44)",
"else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 #",
"# 정상적인 PPS가 없음 return False while len(f): x = f.pop(0) try: if",
"뒤에 붙어 있는 잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _,",
"small_block, verbose): self.verbose = verbose self.mm = mm self.pps = pps self.bsize =",
"ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] ==",
"-1 t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b +",
"input_data else: raise Error('Input data is invalid.') # 수정 모드 self.write_mode = write_mode",
"--------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num < 0: return [] sbd =",
"except IOError: pass return False # --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class",
"# 이전 SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전",
"small block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link):",
"# 전체 BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:]",
"b_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4)) +",
"'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = [] for i in range(len(sbd) /",
"def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks",
"# 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지 않는",
"+ 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off + self.bsize:]",
"+= self.parent.mm[off:off + self.read_size * (e - s + 1)] else: for n",
"pps_prev) + buf[t_off + 4:] if pps_next is not None: t_off = off",
"Stream만 저장 p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir']",
"* 0x80) if del_info and off == 0x180: buf = buf[:off] + '\\x00'",
"BBD 버퍼로 바꾸기 self.bbd = '' for i in t_link: self.bbd += struct.pack('<L',",
"start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None,",
"== node or pps['Dir'] == node: return i def __get_max_node(self, node): # 특정",
"읽기 off = (s + 1) * self.read_size data += self.parent.mm[off:off + self.read_size",
"self.mm[:off] + t_data + self.mm[off + self.bsize:] # XBBD 생성하기 for i in",
"for no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root)",
"pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct =",
"0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD",
"링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Small",
"# 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD 링크를",
"사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat",
"- ' if p['Prev'] == 0xffffffff else '%4d ' % p['Prev'] t +=",
"+ '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size >=",
"= dict() # 사전형 변수 선언 info['author'] = '<NAME>' # 제작자 info['version'] =",
"< len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.bsize)",
"'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 ==",
"= old_link + free_link[:add_num] # 최종 결과의 SBD 링크 t_link = old_link[-1:] +",
"no / bb_num off = no % bb_num # print hex(no), hex(seg), hex(off),",
"x_data = '' # b_data = '' # add_data = '' add_num =",
"또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p in",
"def arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # ---------------------------------------------------------------------",
"XBBD # BBD 추가하기 bbd_no = [] b_data = '\\xff' * self.bsize *",
"or self.ssize != 0x40: # 이상 파일 정보 처리 return False # bbd",
"len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기",
"node: return i def __get_max_node(self, node): # 특정 노드의 Max 값을 가진 node를",
"t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd = ''",
"대한 삭제인지 확인 if reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00' *",
"self.bbd = None self.bbd_fat = {} self.sbd = None self.root = None self.pps",
"in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize self.mm",
"PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 # raise error('Not",
"# 체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array)",
"return ret # --------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no,",
"no, self.bsize) self.sbd_fat = {} for i in range(len(self.sbd) / 4): n =",
"self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no in enumerate(sbd_list_array):",
"error('Not Support : BBD -> BBD (Inc)') n = (len(data) / self.bsize) +",
"(1 if (t_idx % ((bsize / 4) - 1)) else 0) off =",
"뒤에 추가하기 self.mm += '\\x00' * self.bsize * num # 실제 필요한 데이터",
"== node: return i def __get_max_node(self, node): # 특정 노드의 Max 값을 가진",
"len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기",
"in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link",
"kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list : 기존",
"p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid']",
"range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b =",
"import struct import types import kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력",
"else: # 항상 오른쪽 노드가 큰 값임 no = pps['Next'] return no def",
"= self.mm[:off] + data + self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- #",
"사용 # raise error('Not Support : SBD -> BBD') # 섹터가 변화는 것은",
"add_num): if add_num < 0: return [] sbd = self.sbd if self.verbose: open('sbd.dm2',",
"SBD -> SBD (Inc)') # 작업 완료 n = (len(data) / self.ssize) +",
"[] if len(self.pps) == 0: # 분석된 PPS가 없으면 종료 return False if",
"(t_idx % ((bsize / 4) - 1)) next_b = xbbd_start_block for i in",
"i in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i]",
"d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: #",
"bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다.",
"+ buf[t_off + 4:] if start is not None: t_off = off +",
"필요한 블록 수 추가하기 # BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link",
"is not None: t_off = off + 0x78 buf = buf[:t_off] + struct.pack('<L',",
"= kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf,",
"값을 root로 보낸다. t_no = prev_no # 2. prev 노드 하위에 next가 없는",
"% 10000 == 0: if next_b in ret: # 이미 링크가 존재하면 종료",
"Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수 추가하여",
"'/' + pps_name # print (\"%02d : %d %s\") % (node, self.deep, name)",
"# Small block 갱신 self.bbd_fat = {} for i in range(len(self.bbd) / 4):",
"False: # 유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name =",
"PPS가 없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5:",
"next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의",
"플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보 info",
"(내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n in enumerate(t_link): off",
"# ole 파일 열기 self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- # arclist(self,",
"--------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return",
"num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks *",
"+= ' - ' if p['Prev'] == 0xffffffff else '%4d ' % p['Prev']",
"- num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num += x_num b_num = (add_num",
"t_num = (old_num_bbd + b_num - 109) total_xbbd_num = (t_num / ((self.bsize -",
"= zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일",
"last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' #",
"size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본",
"name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name)",
"(내장) # node : PPS 인덱스 # size : 설정 크기 # start",
"None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id",
"if (len(t_link) * self.bsize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data)",
"= (add_num / (self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else 0) old_num_bbd",
"break end = e break else: for i in range(len(num_list)): num_list.pop(0) end =",
"p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s %4s %-4s %-4s",
"Support : BBD -> BBD (Dec)') # 개발 완료 n = (len(data) /",
"있기 때문에... old_b_num = b_num while True: if old_num_bbd + b_num > 109:",
"in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose:",
"= [] try: self.__get_pps_path() except IndexError: pass # small block link 얻기 self.small_block",
"(스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = [] for p",
"tname, p['Type'], t, p['Size']) # PPS 전체 경로 구하기 self.__deep = 0 self.__full_list",
"1. next 노드 값을 root로 보낸다. t_no = next_no else: # prev_no ==",
"''' # XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test()",
"0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str = '' for",
"pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no:",
"블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. # t_link",
"self.fname = input_data self.fp = open(input_data, 'rb') buf = self.fp.read() else: buf =",
"후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info",
"= 0 if (len(t_link) * self.ssize) < len(t_data): # 블록 추가해야 하나? t_size",
"list 개수가 109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block for i in",
"실제 필요한 데이터 블록 self.mm += attach_data else: special_no = [] # 특수",
"이상 파일 정보 처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block",
"+ buf[t_off + 4:] if pps_prev is not None: t_off = off +",
"False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) #",
"name = prefix + pps_name else: if self.pps[node]['Valid'] is False: # 유효한 PPS만",
"pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids",
"if (size % self.bsize) else 0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num)",
"elif del_info: buf = buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if size",
"# t_data의 위치 self.mm = self.mm[:off] + t_data + self.mm[off + self.bsize:] #",
"= get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no in enumerate(sbd_list_array): data =",
"정보 # 리턴값 : [[압축 엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def",
"fileformat # HWP 인가? o = OleFile(filename) try: pics = o.openstream('FileHeader') d =",
"start is not None: t_off = off + 0x74 buf = buf[:t_off] +",
"print ' ' + '%2d %-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'],",
"# 0x4800 - 0x483F # only one charecter can be decoded ch =",
"0x78 buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:] if start",
"self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in",
"더이상 분석하지 않기 위해 처리 f = [] if len(self.pps) == 0: #",
"확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf ==",
"self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한",
"XBBD 개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0: data =",
"Block 추가 요청 # t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를",
"압축 해제된 내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data",
"i in range(seg): if next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b,",
"t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기",
"0 self.fat = None # print self.parent.verbose # 연속된 숫자 값을 리턴한다. #",
"구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = [] for",
"* ((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num",
"# 늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics =",
"= ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1), 'encrypt': (val &",
"(self.bsize / 4)) + (1 if (add_num % (self.bsize / 4)) else 0)",
"개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm +=",
"필요한 개수로 링크 줄이기 # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link)",
"--------------------------------------------------------------------- def exists(self, name): for p in self.__full_list: if p['Name'] == name: return",
"특수 블록 등록 last_no += 1 # END of XBBD # BBD 추가하기",
"처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close() # zfile.close() return True",
"추가하기 t_num = len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한",
"* 74) for p in self.pps: print ' ' + '%2d %-23s %d",
"self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link)",
"0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = '' for i in",
"rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat # HWP 인가? o",
"# CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ #",
"-= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch)",
"%8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체 경로 구하기 self.__deep",
"= fat[next_b] if next_b == 0xfffffffe: break if len(ret) % 10000 == 0:",
"추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) /",
"1) if i != (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야",
"get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.ssize)",
"self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block = small_block def",
"# --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd # 체크 !!! bbd_list_array, _,",
"109: return 0x4c + (idx * 4) else: t_idx = idx - 109",
"buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:] if start is not None:",
"압축될 압축 파일 이름 # file_infos - 압축 대상 파일 정보 구조체 #",
"no, self.bsize) self.bbd_fat = {} for i in range(len(self.bbd) / 4): n =",
"_, _, _ = get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4): n",
"== 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break else: # 항상 오른쪽",
"get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no)",
"i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b =",
"off = (next_b + 1) * self.bsize # t_data의 위치 self.mm = self.mm[:off]",
"% name) # self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd,",
"pics = o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP Document File': val",
"0: continue except IndexError: if (x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점",
"block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print",
"스토리지? t = ow.delete(no) # 링크 삭제 if t: self.init(t) # 새롭게 OLE",
"if no == -1: raise Error('PPS name is invalid.') # print no ow",
"'\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return",
"= self.bbd for no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:]",
"if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block",
"info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일",
"없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir'])",
"old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link",
"in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat =",
"--------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트 vlist = list() # 리스트형",
"data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num)",
"0x2 == 0x2), 'viewtext': (val & 0x4 == 0x4)} except Error: pass o.close()",
"파일 핸들 # filename - 파일 이름 # filename_ex - 압축 파일 내부",
"'' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no,",
"mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) #",
"4) / 4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할",
"len(t_data) - (len(t_link) * self.bsize) t_num = (t_size / self.bsize) + (1 if",
"BBD를 수정한다. # bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd):",
"주석 처리 if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks *",
"0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청",
"if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949',",
"존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신",
"self.pps[0]['Valid'] = True if len(f) == 0: # 정상적인 PPS가 없음 return False",
"블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.bsize) t_num = (t_size",
"buf[t_off + 4:] if start is not None: t_off = off + 0x74",
"bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _",
"'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd",
"for p in self.__full_list: if p['Name'] == name: return True else: return False",
"pps_dir is not None: t_off = off + 0x4C buf = buf[:t_off] +",
": https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in",
"= xbbd_start_block for i in range(seg): if next_b == 0xfffffffe: return -1 t_buf",
"self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한",
"= {} self.verbose = verbose return 0 # 플러그인 엔진 초기화 성공 #",
"+ ('-' * 74) for p in self.pps: print ' ' + '%2d",
"BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기",
"self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff:",
"= org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 # 남은 링크는 모두",
"추가한다. # num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num):",
"self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in",
"None: t_off = off + 0x78 buf = buf[:t_off] + struct.pack('<L', size) +",
"# 범위를 벗어나면 에러 return -1 if idx <= 109: return 0x4c +",
"* 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for i",
"개수만큼 추가한다. # num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self,",
"* self.parent.bsize off += (n % div_n) * self.parent.ssize data += self.parent.mm[off:off +",
"크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 # raise error('Not Support",
"= OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile return zfile #",
"return -1 if idx <= 109: return 0x4c + (idx * 4) else:",
"수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # SBD를",
"('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 # t_link",
"old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 SBD 링크 t_link =",
"def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id == 'arc_ole': o",
"range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp',",
"(add_num / (self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else 0) old_num_bbd =",
"# SBD link 추가 요청한다. (원본 이미지의 SBD link가 수정 됨) # old_link",
"* 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return",
"self.handle: # 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile =",
"# 1. 0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff # root 노드를",
"self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size = self.bsize fat = self.bbd",
"0 # 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을",
"추가해야 할 XBBD 개수 add_num += x_num b_num = (add_num / (self.bsize /",
"OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15)",
"' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s",
"o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close() # zfile.close() return True return",
"-*- # Author: <NAME>(<EMAIL>) import os import sys import struct import types import",
"is invalid.' % name) # self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize,",
"0x200 != 0 or self.ssize != 0x40: # 이상 파일 정보 처리 return",
"등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex)",
"0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext': (val & 0x4 == 0x4)}",
"# 기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L',",
"else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' +",
"유효한 스토리지? t = ow.delete(no) # 링크 삭제 if t: self.init(t) # 새롭게",
"self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : BBD -> BBD (Inc)') n",
"파일 정보 처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block =",
"arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진 ID # arc_name",
"개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num < 0: return []",
"scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0: # 정상적인 PPS가 없음 return",
"# 최종 결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크",
">= len(data): # raise error('Not Support : BBD -> BBD (Dec)') # 개발",
"이미 분석한 노드의 경우 더이상 분석하지 않기 위해 처리 f = [] if",
"while True: if old_num_bbd + b_num > 109: t_num = (old_num_bbd + b_num",
"= None end = None if not start: start = num_list.pop(0) e =",
"# 진단/치료 가능한 악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료",
"pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize)",
"링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link",
"= pics.read() if d[:0x11] == 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] =",
"_, _ = get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4): n =",
"= get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) *",
"[] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i,",
"--------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라 데이터 쓰기 (내장) # ---------------------------------------------------------------------",
"i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in",
"(내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node == 0: pps_name =",
"print if self.bsize % 0x200 != 0 or self.ssize != 0x40: # 이상",
"기존에는 BBD 사용 if org_size >= len(data): # raise error('Not Support : BBD",
"self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4] +",
"pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break else: # 항상",
"= kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가",
"self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i in range(len(self.sbd) /",
"BBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종",
"0xffffffff and i < size / self.bsize)] if len(free_link) >= num: # 여유분이",
"if org_size >= 0x1000: # read_size = self.bsize fat = self.bbd else: #",
"filename :', rname, len(buf) # print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name,",
": 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start = None end =",
"if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이 있는지를",
"self.bbd_fat = {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4)",
"else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type']",
"수 있음 b_num = (add_num / (self.bsize/4)) + (1 if (add_num % (self.bsize/4))",
"next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array",
"하나? t_size = len(t_data) - (len(t_link) * self.ssize) t_num = (t_size / self.ssize)",
"self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no)",
"self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD 링크를 처음 생성하므로 이전 링크가",
"and delete_storage: # 유효한 스토리지? t = ow.delete(no) # 링크 삭제 if t:",
"print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t",
"--------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node == 0: pps_name = '' name",
"in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT')",
"1) * self.read_size data += self.parent.mm[off:off + self.read_size * (e - s +",
"plugins_path - 플러그인 엔진의 위치 # verbose - 디버그 모드 (True or False)",
"else 0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가",
"악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트 vlist =",
"self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return []",
"return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd :",
"= self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) #",
"not None: t_off = off + 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir)",
"# OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE",
"+= bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _,",
"= (len(data) / self.bsize) + (1 if (len(data) % self.bsize) else 0) t_data",
"4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'],",
"get_block_link(sb, self.fat) data = '' if size >= 0x1000: t_list = list(list_array) while",
"self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: # 출력시 이름이",
"수정한다. # sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): #",
"== num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을",
"최종 결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기",
"get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t = kavutil.get_uint32(t, off*4) # print",
"# PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를",
"# PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : BBD",
"0x80 + buf[off+0x80:] if size is not None: t_off = off + 0x78",
"0: return [] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ =",
"listvirus(self): # 진단 가능한 악성코드 리스트 vlist = list() # 리스트형 변수 선언",
"링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite",
"# --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num < 0: return [] sbd",
"'\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size >= len(data):",
"0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD",
"if len(f) == 0: # 정상적인 PPS가 없음 return False while len(f): x",
"주요 데이터 self.mm = buf self.bsize = 0 self.ssize = 0 # 임시",
"t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link)",
"악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를",
"파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise",
"------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass # ---------------------------------------------------------------------",
"s + 1)] else: for n in list_array: div_n = self.parent.bsize / self.parent.ssize",
"self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:] # 파일 뒤에",
"for no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else:",
"x = f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and",
"- 파일 이름 # 리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self,",
"% 8) * self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i + 1)",
"추가해야 하나? t_size = len(t_data) - (len(t_link) * self.ssize) t_num = (t_size /",
"self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD 링크를 처음 생성하므로 이전 링크가",
"더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) #",
"n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD')",
"# --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0,",
"next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b] if next_b ==",
"False # --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data,",
"ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm)",
"name, data): for p in self.__full_list: if p['Name'] == name: no = p['Node']",
"'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type']",
"= get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t = kavutil.get_uint32(t, off*4) #",
"(len(t_link) * self.bsize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) -",
"= buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if size is not None:",
"# node : PPS 인덱스 # size : 설정 크기 # start :",
"block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data))",
"break ret.append(next_b) except KeyError: break return ret # --------------------------------------------------------------------- # OLE 블록 읽기",
"Small Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수 #",
"data = bbd[i * self.bsize:(i + 1) * self.bsize] off = (no +",
"최종 결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기",
"이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 #",
"= bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if",
"파일 뒤에 붙어 있는 잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks,",
"('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print ' ' +",
"SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps",
"# 수정된 SDB 적용하기 return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를",
"+ 1) * self.bsize off += (n % 8) * self.ssize self.mm =",
"요청 # t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함 t_link",
"HWP 인가? o = OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read() if",
"block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for",
"(len(data) % self.bsize) else 0) t_data = data + ('\\x00' * ((n *",
"조합 self.mm += x_data + b_data + add_data + attach_data # 특수 블록에",
"((node % 4) * 0x80) if del_info and off == 0x180: buf =",
"num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도",
"입력값 : filename - 파일 이름 # 리턴값 : 압축 파일 핸들 #",
"+ self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big",
"< num_of_bbd_blocks: return False self.bbd = '' for i in range(num_of_bbd_blocks): no =",
"('\\x00' * ((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기",
"= '<NAME>' # 제작자 info['version'] = '1.1' # 버전 info['title'] = 'OLE Library'",
"else: return False # --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name):",
"p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s %4s",
"PPS 전체 경로 구하기 self.__deep = 0 self.__full_list = [] try: self.__get_pps_path() except",
"d o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True)",
"next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd =",
"- 1) if i != (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을",
"buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next is not None:",
"0) t_data = data + ('\\x00' * ((n * self.bsize) - len(data))) #",
"# 리턴값 : [[압축 엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self,",
"+ self.mm[off + self.bsize:] if __name__ == '__main__': # import zlib # o",
"self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read() except: data = None return",
"= kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link",
"pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: #",
"next_b = no if next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b =",
"if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks",
"주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def",
"만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise",
"p['Start'], p['Size']) ''' print ' %-2s %-32s %4s %-4s %-4s %-4s %8s %8s'",
"total_bbd_num = old_num_bbd + b_num # 전체 BBD list 개수 self.mm = self.mm[:0x2c]",
"'encrypt': (val & 0x2 == 0x2), 'viewtext': (val & 0x4 == 0x4)} except",
"self.mm += attach_data else: special_no = [] # 특수 목적의 Big Block 번호.",
"== 0xffffffff: # 단일 노드 # 1. 0xffffffff 노드 값을 root로 보낸다. t_no",
"bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:]",
"self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i,",
"print ' %-2s %-32s %4s %-4s %-4s %-4s %8s %8s' % ('No', 'Name',",
"self.sbd = '' for no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat",
"BBD -> SBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n =",
"# TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start = None",
"break else: for i in range(len(num_list)): num_list.pop(0) end = e return start, end",
"노드 값을 root로 보낸다. t_no = next_no else: # prev_no == 0xffffffff and",
"range(len(self.root) / 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40),",
"Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize",
"--------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node =",
"버전 info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole' # 엔진",
"수집해서 한꺼번에 파일로 읽기 off = (s + 1) * self.read_size data +=",
"vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록",
"== 0x00 and ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2] else: name",
"(len(data) / self.bsize) + (1 if (len(data) % self.bsize) else 0) t_data =",
"# --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list : 기존 Small block 링크",
"다음 continue t = '' t += ' - ' if p['Prev'] ==",
"파일 이름 # 리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename):",
"== 0xfffffffe: break if len(ret) % 10000 == 0: if next_b in ret:",
"buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass",
"add_num < 0: return [] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _,",
"[] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch",
"확인 if reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00' * size) #",
"d = pics.read() if d[:0x11] == 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp']",
"0 self.__full_list = [] self.parse() # OLE 파일을 분석 def close(self): if self.isfile:",
"SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _,",
"end def read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if",
"# b_data = '' # add_data = '' add_num = num - n",
"= [] for p in self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name'])",
"(len(t_link) * self.bsize) t_num = (t_size / self.bsize) + (1 if (t_size %",
"pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() ''' #",
"t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD",
"if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break else: #",
"= [] fat = bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe: ret.append(next_b)",
"= self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block,",
"else: # prev_no == 0xffffffff and next_no == 0xffffffff: # 단일 노드 #",
"0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:]",
"--------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array =",
"o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP Document File': val = ord(d[0x24])",
"None # print self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO : 임시로",
"contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >>",
"= OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t",
"org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call') # ---------------------------------------------------------------------",
"--------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for p",
": 0 - 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self):",
"BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = []",
"sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no",
"# 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- #",
": arc_engine_id - 압축 엔진 ID # arc_name - 압축 파일 # fname_in_arc",
"self.verbose) bbd_list_array = [] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i *",
"'' for i in t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기",
"bbd_no = [] b_data = '\\xff' * self.bsize * b_num for i in",
"no, bsize): off = (no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의",
"Library' # 엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type'] =",
"pics.read() if d[:0x11] == 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress':",
"= DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps,",
"0x1000: # BBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 if",
"= kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인",
"+= struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array =",
"# bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' #",
"num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in range(len(t)",
"<< kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) * bsize if fsize >",
"블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.ssize) t_num = (t_size",
"# 파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname",
"open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = [] for i in range(len(sbd)",
"get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() ''' # XBBD 늘어나는 경우 #",
"ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose)",
"self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd for no in t_link: sbd",
"self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage:",
"--------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link): off = (n",
"= self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat)",
"in self.pps: if p['Valid'] is False: # 유효한 Tree가 아니면 다음 continue t",
">= 0x1000: # 기존에는 BBD 사용 # raise error('Not Support : BBD ->",
"p in self.__full_list: if p['Name'] == name: no = p['Node'] break else: no",
"수집하기 sbd = self.sbd for no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff'",
"파일 이름 # file_infos - 압축 대상 파일 정보 구조체 # 리턴값 :",
"-> SBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data)",
"# 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no,",
"i, no in enumerate(bbd_link) if (no == 0xffffffff and i < size /",
"= '' if size >= 0x1000: t_list = list(list_array) while len(t_list): s, e",
"= pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42])",
"XBBD 처리하기 if total_bbd_num > 109: t_num = (total_bbd_num - 109) total_xbbd_num =",
"+ '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) #",
"if i != (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로",
"bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를",
"전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link:",
"= ('\\x00' * self.bsize * add_num) # 추가해야 할 BBD list 개수는 한개의",
"* 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) #",
"않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat =",
"for no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for",
"self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block =",
"# --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize) * self.bsize #",
"self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array =",
"t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat = {} for i",
"None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data",
"늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader')",
"XBBD 블록도 추가될 수 있기 때문에... old_b_num = b_num while True: if old_num_bbd",
"블록 등록 last_no += 1 # END of XBBD # BBD 추가하기 bbd_no",
"(s + 1) * self.read_size data += self.parent.mm[off:off + self.read_size * (e -",
"len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD",
"# 항상 오른쪽 노드가 큰 값임 no = pps['Next'] return no def delete(self,",
"스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스",
"+ self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라 데이터 쓰기",
"return False if self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s",
"((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num =",
"next_no == 0xffffffff: # Prev만 존재 # 1. prev 노드 값을 root로 보낸다.",
"= data + ('\\x00' * ((n * self.ssize) - len(data))) # 여분의 크기를",
"> 109: t_num = (total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize -",
"# num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size",
"_, _, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no =",
"raise error('Not Support : BBD -> SBD') # 섹터가 변화는 것은 Dec, Inc가",
"(self.parent.small_block[n / div_n] + 1) * self.parent.bsize off += (n % div_n) *",
"bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: #",
">= 0x1000: # read_size = self.bsize fat = self.bbd else: # read_size =",
"else: # 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD 링크",
"'\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] # SBD가 나누어 bsize 단위가 아니면",
"BBD 사용 if org_size >= len(data): # raise error('Not Support : BBD ->",
"if (add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm,",
"n in enumerate(t_link): off = (self.small_block[n / 8] + 1) * self.bsize off",
"BBD link가 수정 됨) # old_link : 기존 BBD link # add_num :",
"압축 파일 이름 # file_infos - 압축 대상 파일 정보 구조체 # 리턴값",
"struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부",
"헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) # node : PPS 인덱스 #",
"= '' t += ' - ' if p['Prev'] == 0xffffffff else '%4d",
"old_link, add_num): if add_num < 0: return [] sbd = self.sbd if self.verbose:",
"수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : SBD -> SBD (Inc)')",
"0: # 출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00",
"self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support",
"self.bbd = '' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd +=",
"foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스",
"# PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : SBD",
"# listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트",
"off = (self.parent.small_block[n / div_n] + 1) * self.parent.bsize off += (n %",
"def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream:",
"추가 요청한다. (원본 이미지의 SBD link가 수정 됨) # old_link : 기존 SBD",
"수정, Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print",
"def __modify_big_block_link(self, old_link, add_num): if add_num < 0: return [] # 전체 BBD",
"# 이전 링크 수집하기 sbd = self.sbd for no in t_link: sbd =",
"t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe",
"--------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def",
"self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in",
"if add_num < 0: return [] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd)",
"len(self.pps) == 0: # 분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir'] !=",
"# BBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 if org_size",
"# root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no !=",
"del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and",
"filename): if filename in self.handle: # 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename,",
"p['Dir'] == 0xffffffff else '%4d ' % p['Dir'] t += ' - '",
"압축 파일 # fname_in_arc - 압축 해제할 파일 이름 # 리턴값 : 압축",
"def get_liner_value(self, num_list): start = None end = None if not start: start",
"''' # 수정된 data를 쓰기 위해 준비한다 if len(data) >= 0x1000: # BBD를",
"0x80 elif del_info: buf = buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if",
"== x: e = x loop = True continue else: while loop: if",
"= 0xfffffffe # 링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for",
"self.parent.ssize off = (self.parent.small_block[n / div_n] + 1) * self.parent.bsize off += (n",
"/ 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link = [i",
"뒤쪽에 추가하기 t_num = len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) #",
"return True # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def",
"if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if",
"열기 self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) #",
"+= '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i != (x_num-1): x_data += struct.pack('<L',",
"용량 add_big_num = (size / self.bsize) + (1 if (size % self.bsize) else",
"attach_data else: special_no = [] # 특수 목적의 Big Block 번호. 해당 블록은",
"+= x_data + b_data + add_data + attach_data # 특수 블록에 BBD list도",
"데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) #",
"return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for",
"self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat",
"None self.bbd_list_array = None self.bbd = None self.bbd_fat = {} self.sbd = None",
"self.bsize) + (1 if (len(data) % self.bsize) else 0) t_data = data +",
"# 이전 BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전",
"분석한다. # 입력값 : filehandle - 파일 핸들 # filename - 파일 이름",
"fileformat - 파일 포맷 분석 정보 # 리턴값 : [[압축 엔진 ID, 압축된",
"# 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent",
"if filename in self.handle: # 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None)",
"분석 정보} or None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret =",
"0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL:",
"= get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) *",
"--------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보 info = dict() # 사전형",
"pics.read() o.close() ''' # XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True)",
"수 있는 Big Block 개수 for no in special_no: seg = no /",
"p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s %d %22s",
"self.ssize = ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat",
"== -1: raise Error('PPS name is invalid.') return Stream(self, no) # --------------------------------------------------------------------- #",
"t_num = 0 if (len(t_link) * self.bsize) < len(t_data): # 블록 추가해야 하나?",
"SBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) /",
"목록 추출하기 o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list",
"추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n)",
"구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20)",
"# file_infos - 압축 대상 파일 정보 구조체 # 리턴값 : 압축 성공",
"for i, n in enumerate(t_link): off = (n + 1) * self.bsize self.mm",
"False for x in num_list: if e + 1 == x: e =",
"zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class",
"--------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle: # 이전에 열린 핸들이 존재하는가?",
"len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize #",
"bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b = no if next_b !=",
"def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE",
"for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self,",
"pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size",
"i != (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를",
"PPS가 없음 return False while len(f): x = f.pop(0) try: if self.pps[x]['Type'] !=",
"self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num",
"in self.__full_list: if p['Name'] == name: no = p['Node'] break else: no =",
"# pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657",
"self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'],",
"and streams: ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name']) else: pass return",
"if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data = ''",
"or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if",
"# --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 : 0 -",
"'%4d ' % p['Dir'] t += ' - ' if p['Start'] == 0xffffffff",
"!= 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f)",
"t_data = data + ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data",
"+ pps_name # print (\"%02d : %d %s\") % (node, self.deep, name) #",
"self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True # --------------------------------------------------------------------- #",
"else: # 기존에는 SBD 사용 if org_size >= len(data): # raise error('Not Support",
"and target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지? t = ow.delete(no) #",
"# BBD를 모은다 bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array,",
"block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data))",
"# 기존에는 BBD 사용 # raise error('Not Support : BBD -> SBD') #",
": 설정 크기 # start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node,",
"------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def",
"필요 없음 # 잔여 개수 체크하기 last_no = (size / self.bsize) - 2",
"% self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막",
"(x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: #",
"i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off)",
"else: # raise error('Not Support : SBD -> SBD (Inc)') # 작업 완료",
"link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num < 0: return",
"n, self.bsize) off = ((node % 4) * 0x80) if del_info and off",
"if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else:",
"t = self.bsize - n sbd += '\\xff' * t if self.verbose: open('sbd.dm3',",
"free_link[:add_num] # SBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num]",
"node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no !=",
"+ struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD 처리하기 if total_bbd_num",
"= open(input_data, 'rb') buf = self.fp.read() else: buf = input_data else: raise Error('Input",
"kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if",
"1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- #",
"블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1) * bsize",
"self.bsize if n: t = self.bsize - n sbd += '\\xff' * t",
"%-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print ' '",
"range(len(num_list)): num_list.pop(0) end = e return start, end def read(self): pps = self.parent.pps[self.node]",
"buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block, small bloc 크기",
"클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat,",
"((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat)",
"bb_num off = no % bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4))",
"xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1)",
"= self.mm[:off] + t_data[i * self.bsize:(i + 1) * self.bsize] + self.mm[off +",
"주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인 엔진 정보 #",
"add_data + attach_data # 특수 블록에 BBD list도 추가 special_no += bbd_no #",
"delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no =",
"# 특수 블록 등록 last_no += 1 # END of XBBD # BBD",
"if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = [] for i",
"여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기",
"get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd",
"i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list",
"sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에",
"prev_no elif prev_no == 0xffffffff and next_no != 0xffffffff: # Next만 존재 #",
"pps_name # print (\"%02d : %d %s\") % (node, self.deep, name) # if",
"idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf,",
"self.ssize) + (1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록",
"num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num += x_num b_num = (add_num /",
"# END of XBBD # BBD 추가하기 bbd_no = [] b_data = '\\xff'",
"add_data = '' add_num = num - n # 추가해야 할 블록 수",
"= None self.bbd = None self.bbd_fat = {} self.sbd = None self.root =",
"Error: pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의",
"데이터를 small block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data,",
"{} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i * 4)",
"bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no * 4]",
"pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name):",
"= node while True: pps = self.pps[no] if pps['Next'] == 0xffffffff: # 더이상",
"--------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False):",
"이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus())",
"= None self.exploit = [] # 취약점 존재 여부 # 임시 변수 self.__deep",
"arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name)",
"성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진",
"tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s %d %22s %8d'",
"no in enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link +",
"= struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록",
"+ buf[t_off + 4:] if pps_next is not None: t_off = off +",
"in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존",
"kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에 BBD 등록하기 for i, no",
"4:] if start is not None: t_off = off + 0x74 buf =",
"self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크 추가",
"kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' %",
"buf = buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if size is not",
"분석한 노드의 경우 더이상 분석하지 않기 위해 처리 f = [] if len(self.pps)",
"스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p in self.__full_list: if p['Name']",
"= self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd = '' for",
"+= 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix)",
"+ t_data[i * self.bsize:(i + 1) * self.bsize] + self.mm[off + self.bsize:] #",
"사전형 변수 선언 info['author'] = '<NAME>' # 제작자 info['version'] = '1.1' # 버전",
"= self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array",
"+ 1) * self.read_size data += self.parent.mm[off:off + self.read_size * (e - s",
"get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for i in range(num_of_xbbd_blocks-1): t_data =",
"% sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0,",
"self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type'] == 1 and",
"* 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try:",
"유효한 PPS에 대한 삭제인지 확인 if reset_stream: size = target_pps['Size'] t = ow.write(no,",
"self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: #",
"* 4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다. n = len(sbd) %",
"== 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read() except:",
"찾는다. free_link = [i for i, no in enumerate(sbd_link) if (no == 0xffffffff",
"no = t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1]",
"in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link",
"블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD 링크를 처음 생성하므로",
"i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i !=",
"영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else:",
"return (next_b + 1) * bsize + (off * 4) # --------------------------------------------------------------------- #",
"no) no = t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no =",
"self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class",
"추가 블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize * num",
"continue else: # 0x3800 - 0x383F # the value contains two characters ch",
": 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if",
"is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except",
"dict() # 사전형 변수 선언 info['author'] = '<NAME>' # 제작자 info['version'] = '1.1'",
"return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if",
"플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인 엔진",
"= get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해 준비한다 if len(data) >=",
"------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- #",
"잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm)",
"전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link:",
"= o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() ''' # XBBD",
"self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = [] for i in",
"kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = ''",
"= None # print self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO :",
"삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb,",
"self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS",
"org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 # 남은",
"ret.append(next_b) except KeyError: break return ret # --------------------------------------------------------------------- # OLE 블록 읽기 #",
"i in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm += x_data",
"name = '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace')",
"pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree()",
"경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node == 0:",
"존재하는가? if prev_no != 0xffffffff and next_no != 0xffffffff: # 양쪽 모두 노트가",
"p in self.pps: print ' ' + '%2d %-23s %d %8X %8X %8X",
"o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을 얻는다.",
"# BBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 # num_link",
"pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo",
"0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) ==",
"# SBD 링크를 생성한다. sbd_link = [] for i in range(len(sbd) / 4):",
"verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X'",
"target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지 확인 if reset_stream:",
"p['Prev'] t += ' - ' if p['Next'] == 0xffffffff else '%4d '",
"% self.ssize) else 0) t_data = data + ('\\x00' * ((n*self.ssize) - len(data)))",
"print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) #",
"prefix) return 0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력) #",
"else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data =",
"* 2)) for ch in wch: if 0x3800 <= ch <= 0x4840: if",
"prev 노드 값을 root로 보낸다. t_no = prev_no # 2. prev 노드 하위에",
"= self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read() except: data = None",
"--------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림",
"f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) ==",
"_, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list 블록에 들어갈",
"self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) /",
"# Root 수정, Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics =",
"t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link)",
"self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라 데이터 쓰기 (내장)",
"' if p['Next'] == 0xffffffff else '%4d ' % p['Next'] t += '",
"no, self.bsize) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4))",
"== -1: raise Error('PPS name is invalid.') # print no ow = OleWriteStream(self.mm,",
"for ch in wch: if 0x3800 <= ch <= 0x4840: if ch >=",
"종료 return 0 # 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) #",
"if add_num < 0: return [] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks,",
"Error('PPS name is invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다.",
"return # 추가할 필요 없음 else: # 여유분이 부족함. 따라서 Root를 늘려야 함",
"if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in range(len(bbd) / 4):",
"if next_b in ret: # 이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError:",
"False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o",
"OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self,",
"# verbose - 디버그 모드 (True or False) # 리턴값 : 0 -",
"'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock",
"n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD')",
"ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의",
"self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block 링크를",
"del_info=False): n = self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize) off =",
"0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를",
"kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' #",
"Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0])",
"크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두",
"if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS 전체",
"= t_link[i+1] data = struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4] +",
"Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수 # ---------------------------------------------------------------------",
"종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자,",
"< 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109)",
"def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile = False",
"1) * self.bsize) + (off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' +",
"= get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm = (self.mm[:off] + struct.pack('<L',",
"+ struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다.",
"!= 5: # Stream만 저장 p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']}",
"self.ssize != 0x40: # 이상 파일 정보 처리 return False # bbd 읽기",
"삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list: if",
"kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps = [] for i in range(len(self.root)",
"pass # ----------------------------------------------------------------- for p in self.__full_list: if p['Name'] == name: no =",
"추가하기 # BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num)",
"= get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) *",
"0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def",
"----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent = parent self.node = node",
"__get_max_node(self, node): # 특정 노드의 Max 값을 가진 node를 찾기 no = node",
"if pps_next is not None: t_off = off + 0x48 buf = buf[:t_off]",
"else 0) t_data = data + ('\\x00' * ((n * self.ssize) - len(data)))",
"--------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if",
"next_b = fat[next_b] if next_b == 0xfffffffe: break if len(ret) % 10000 ==",
"추가해야 할 용량 add_big_num = (size / self.bsize) + (1 if (size %",
"(t_num % ((self.bsize - 4) / 4)) else 0) x_num = total_xbbd_num -",
"Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of",
"= self.fp.read() else: buf = input_data else: raise Error('Input data is invalid.') #",
"close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기",
"in fileformat: try: # OLE Stream 목록 추출하기 o = self.__get_handle(filename) for name",
"value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch",
"= get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전",
"= node self.read_size = 0 self.fat = None # print self.parent.verbose # 연속된",
"[] # 취약점 존재 여부 # 임시 변수 self.__deep = None self.__full_list =",
"% 4) * 0x80) if del_info and off == 0x180: buf = buf[:off]",
"Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data = '' # b_data",
"info['author'] = '<NAME>' # 제작자 info['version'] = '1.1' # 버전 info['title'] = 'OLE",
"self.pps[node]['Type'] != 5: # Stream만 저장 p = {'Node': node, 'Name': name[1:], 'Type':",
"close(self): pass # ----------------------------------------------------------------- for p in self.__full_list: if p['Name'] == name: no",
"/ div_n] + 1) * self.parent.bsize off += (n % div_n) * self.parent.ssize",
"self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = [] for",
"with open(rname, 'rb') as fp: buf = fp.read() # print '[-] filename :',",
"__decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link =",
"filename, filename_ex) # 파일 포맷을 분석한다. # 입력값 : filehandle - 파일 핸들",
"위치 # verbose - 디버그 모드 (True or False) # 리턴값 : 0",
"(내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link): off",
"--------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id - 압축 가능",
"압축 파일의 핸들을 얻는다. # 입력값 : filename - 파일 이름 # 리턴값",
"% (node, self.deep, name) # if self.pps[node]['Type'] != 5: # Stream만 저장 p",
"self.bsize) else: t_data = '' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b,",
"수정 됨) # old_link : 기존 SBD link # add_num : 추가 SBD",
"print (\"%02d : %d %s\") % (node, self.deep, name) # if self.pps[node]['Type'] !=",
"= filehandle # OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE'",
"== 0xffffffff else '%4d ' % p['Prev'] t += ' - ' if",
"in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if",
"없음 else: # 여유분이 부족함. 따라서 Root를 늘려야 함 size = num *",
"%8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size'])",
"= { # 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize",
"(val & 0x4 == 0x4)} except Error: pass o.close() return ret # ---------------------------------------------------------------------",
"t_list = list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를 모두",
"block 링크 # num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def",
"[] for i in range(len(self.root) / 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80]",
"get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd for no in t_link:",
"isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp = open(input_data,",
"def __get_root_node(self, node): # 해당 정보를 가진 root를 찾기 for i, pps in",
"print sbd_list_array for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no",
"n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data,",
"# ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self,",
"''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2",
"# OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8)",
"b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' #",
"크기 # start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None,",
"n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small Block",
"__version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class",
"self.isfile = True self.fname = input_data self.fp = open(input_data, 'rb') buf = self.fp.read()",
"링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된 SBD 이미지 #",
"root_list_array self.root = '' for no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize)",
"# 출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00 and",
"pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize) off",
"+ struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next is not None: t_off",
"109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b",
"= [] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t",
"= input_data self.fp = open(input_data, 'rb') buf = self.fp.read() else: buf = input_data",
"PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >=",
"raise Error('PPS name is invalid.') # print no ow = OleWriteStream(self.mm, self.pps, self.bsize,",
"while True: pps = self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면",
"print self.small_block return True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) #",
"(no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의",
"0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만",
"big block, small bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize",
"i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if",
"__get_root_node(self, node): # 해당 정보를 가진 root를 찾기 for i, pps in enumerate(self.pps):",
"= no / bb_num off = no % bb_num # print hex(no), hex(seg),",
"OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP Document",
"kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의",
"!= 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue except IndexError:",
"% bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4)",
"self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm",
"모드 (True or False) # 리턴값 : 0 - 성공, 0 이외의 값",
"[] self.parse() # OLE 파일을 분석 def close(self): if self.isfile: self.fp.close() if self.write_mode:",
"self.mm[:off] + t_data[i * self.ssize:(i + 1) * self.ssize] + self.mm[off + self.ssize:]",
"two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6)",
"있는 Big Block 개수 for no in special_no: seg = no / bb_num",
"self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block, small",
"뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm,",
"0) off = (t_idx % ((bsize / 4) - 1)) next_b = xbbd_start_block",
"엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보 info =",
"p['Valid'] is False: # 유효한 Tree가 아니면 다음 continue t = '' t",
"data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num =",
"sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i for i,",
"+ 4:] if pps_prev is not None: t_off = off + 0x44 buf",
"self.__get_pps_path() except IndexError: pass # small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat)",
"name is invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. #",
"# 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for",
"= self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd",
"'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w')",
"파일 이름 # 리턴값 : 압축 해제된 내용 or None # --------------------------------------------------------------------- def",
"= self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename]",
"--------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) #",
"o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics",
"None self.exploit = [] # 취약점 존재 여부 # 임시 변수 self.__deep =",
"bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd",
"리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요",
"수 add_data = ('\\x00' * self.bsize * add_num) # 추가해야 할 BBD list",
"def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의 경우 더이상 분석하지 않기",
"self.bsize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) *",
"= t_link[i+1] data = struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4] +",
"* self.bsize * num # 실제 필요한 데이터 블록 self.mm += attach_data else:",
"sbd += '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB",
"self.root = '' for no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if",
"숫자 값을 리턴한다. # TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list):",
"kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small",
"추가하여 링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num *",
"info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info # --------------------------------------------------------------------- #",
"0x00: name = '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE',",
"핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle: # 이전에 열린",
"- ' if p['Start'] == 0xffffffff else '%8X ' % p['Start'] tname =",
"'' for i in t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기",
"BBD 등록하기 for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i)",
"buf = buf[:off] + '\\x00' * 0x80 elif del_info: buf = buf[:off] +",
"= '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm,",
"get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 #",
"data) if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는",
"# print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기",
"# 추가해야 할 XBBD 개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks ==",
"File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1), 'encrypt':",
"import sys import struct import types import kernel import kavutil # ------------------------------------------------------------------------- #",
"len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전",
"data를 쓰기 위해 준비한다 if len(data) >= 0x1000: # BBD를 사용한다. if org_size",
"start loop = False for x in num_list: if e + 1 ==",
"4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) ==",
"self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block =",
"get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로",
"+ (1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수",
"size=len(data)) else: # 기존에는 SBD 사용 # raise error('Not Support : SBD ->",
"old_num_bbd + i) # print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) +",
"Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize)",
"Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list 개수가 109보다",
"prefix + '/' + pps_name # print (\"%02d : %d %s\") % (node,",
"- 1)) else 0) off = (t_idx % ((bsize / 4) - 1))",
"get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD",
": 기존 Small block 링크 # num_link : 필요로 하는 전체 링크 수",
"len(sbd) % self.bsize if n: t = self.bsize - n sbd += '\\xff'",
"/ 4) - 1)) + (1 if (t_idx % ((bsize / 4) -",
"verbose=False): # 플러그인 엔진 초기화 self.handle = {} self.verbose = verbose return 0",
"할 BBD list 개수는 한개의 BBD에는 bsize / 4 개수만큼 Big Block을 담을",
"return data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다. # ---------------------------------------------------------------------",
"= [i for i, no in enumerate(bbd_link) if (no == 0xffffffff and i",
"self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = '' for i",
"존재하면 종료 break ret.append(next_b) except KeyError: break return ret # --------------------------------------------------------------------- # OLE",
"pps_next) + buf[t_off + 4:] if pps_dir is not None: t_off = off",
"없는 부분은 제거 attach_data = self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터",
"= kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num",
"pass return False # --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def",
"= root_list_array self.root = '' for no in root_list_array: self.root += get_bblock(self.mm, no,",
"영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return",
"109: # bbd list 개수가 109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block",
"'' for no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {}",
"+ (1 if (len(data) % self.ssize) else 0) t_data = data + ('\\x00'",
"# PPS 전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False):",
"= self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break",
"SBD 링크를 찾는다. free_link = [i for i, no in enumerate(sbd_link) if (no",
"붙어 있는 잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _",
"bbd): self.bbd = bbd # 체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm)",
"/ 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd)",
"0x1000: t_list = list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를",
"0xfffffffe # 링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for i",
"= root['Size'] r_no = root['Start'] # SBD 링크를 생성한다. sbd_link = [] for",
"OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1) *",
"self.bsize fat = self.bbd else: # read_size = self.ssize fat = self.sbd #",
"return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose)",
"o.openstream(fname_in_arc) try: data = fp.read() except: data = None return data # ---------------------------------------------------------------------",
"+= (n % div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose:",
"+ ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link",
"= small_block def __get_root_node(self, node): # 해당 정보를 가진 root를 찾기 for i,",
"size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data): #",
"= f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name'])",
"scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir']",
"t += ' - ' if p['Dir'] == 0xffffffff else '%4d ' %",
":', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except IOError:",
"target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지? t = ow.delete(no)",
"kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize + (off * 4) #",
"bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for",
"else: for i in range(len(num_list)): num_list.pop(0) end = e return start, end def",
"block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록",
"정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat",
"BBD 링크를 찾는다. free_link = [i for i, no in enumerate(bbd_link) if (no",
"# HWP 인가? o = OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read()",
"오른쪽 노드가 큰 값임 no = pps['Next'] return no def delete(self, del_no): del_pps",
"self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:] #",
"- 1) - last_no if n >= num: # 잔여 개수가 추가하려는 개수보다",
"verbose self.mm = mm self.pps = pps self.bsize = bsize self.ssize = ssize",
"# 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off = (s + 1)",
"= get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4)",
"in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link",
"if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True",
"1 # XBBD 처리하기 if total_bbd_num > 109: t_num = (total_bbd_num - 109)",
"OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self,",
"Dir', 'SB', 'Size') print ' ' + ('-' * 74) for p in",
"old_link + free_link[:add_num] # 최종 결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num]",
"+= get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'],",
"file_info in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with",
"!= 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if",
"링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off = (next_b + 1)",
"sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] # SBD가",
"self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지",
"= get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한",
"''' ''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d",
"None self.__full_list = None self.init(buf) def init(self, buf): # OLE 주요 데이터 self.mm",
"no = pps['Next'] return no def delete(self, del_no): del_pps = self.pps[del_no] prev_no =",
"추가해야 할 XBBD 개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0:",
"%4s %-8s %-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir',",
"try: # OLE Stream 목록 추출하기 o = self.__get_handle(filename) for name in o.listdir():",
"enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm = (self.mm[:off]",
"invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def",
"뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num = 0",
"self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i + 1) * self.ssize] +",
"n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize) else 0)",
"= None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try:",
"range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link =",
"= data + ('\\x00' * ((n * self.bsize) - len(data))) # 여분의 크기를",
"len(self.pps[x]['Name']) == 0: continue except IndexError: if (x & 0x90900000) == 0x90900000: #",
"free_link = [i for i, no in enumerate(sbd_link) if (no == 0xffffffff)] if",
"range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp',",
"no if next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b] if",
"next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) #",
"if (no == 0xffffffff and i < size / self.bsize)] if len(free_link) >=",
"링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else: # 이전",
"types import kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 # -------------------------------------------------------------------------",
"idx <= 109: return 0x4c + (idx * 4) else: t_idx = idx",
"모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link =",
"0 self.__full_list = [] try: self.__get_pps_path() except IndexError: pass # small block link",
"= input_data else: raise Error('Input data is invalid.') # 수정 모드 self.write_mode =",
"pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을",
"CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고",
"no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: #",
"if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False # ---------------------------------------------------------------------",
"특정 스트림의 크기를 조정한다. (내장) # node : PPS 인덱스 # size :",
"verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() '''",
"t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1] bbd =",
"Support : SBD -> SBD (Dec)') # 지원 완료 n = (len(data) /",
"list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1",
"info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료",
"--------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name'] ==",
"len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 #",
"data = struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4] + data +",
"1) * self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i + 1) *",
"total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록 연결 next_b",
"True: if old_num_bbd + b_num > 109: t_num = (old_num_bbd + b_num -",
"# --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self,",
"data): for p in self.__full_list: if p['Name'] == name: no = p['Node'] break",
"openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def",
"= 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d'",
"% self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60)",
"self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if size >= 0x1000: self.read_size =",
"# --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축",
"# 리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의",
"# 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무",
"링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd",
"'%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0,",
"file_scan_list except: pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) #",
"0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b] if next_b == 0xfffffffe: break",
"1) * self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정",
"for i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n",
"start = None end = None if not start: start = num_list.pop(0) e",
"+ self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self):",
"data): # 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] '''",
"파일 포맷을 분석한다. # 입력값 : filehandle - 파일 핸들 # filename -",
"0x74 buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:] if pps_prev",
"parent, node): self.parent = parent self.node = node self.read_size = 0 self.fat =",
"self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기",
"hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no +",
"pps['Prev'] == node or pps['Next'] == node or pps['Dir'] == node: return i",
"arc_name - 최종적으로 압축될 압축 파일 이름 # file_infos - 압축 대상 파일",
"리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list",
"in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list =",
"for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를",
"# --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name']",
"# BBD List에 BBD 등록하기 for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm,",
"pps['Dir'] == node: return i def __get_max_node(self, node): # 특정 노드의 Max 값을",
"is not None: t_off = off + 0x4C buf = buf[:t_off] + struct.pack('<L',",
"t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) *",
"# PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) # node : PPS",
"내부 파일 이름 # 리턴값 : {파일 포맷 분석 정보} or None #",
"# read_size = self.bsize fat = self.bbd else: # read_size = self.ssize fat",
"# --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False,",
"종료한다. # 리턴값 : 0 - 성공, 0 이외의 값 - 실패 #",
"self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. #",
"self.parse() # OLE 파일을 분석 def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname,",
"= 0 self.ssize = 0 # 임시 변수 self.__deep = 0 self.__full_list =",
"지원 완료 n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize)",
"buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm, n, self.bsize)",
"t_size != 0: # 출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0",
"return 0 # 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인",
"= self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off = (s",
"for i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i * 4) data",
"open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0,",
"ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부 링크",
"의미 없음 n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize)",
"CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid']",
"org_size >= 0x1000: # read_size = self.bsize fat = self.bbd else: # read_size",
"self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array",
"t_no = 0xffffffff # root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] ==",
"sys import struct import types import kernel import kavutil # ------------------------------------------------------------------------- # 메시지",
"else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0,",
"False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/",
"del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두",
"- 플러그인 엔진의 위치 # verbose - 디버그 모드 (True or False) #",
"# --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장) #",
"!= 0xffffffff and next_no != 0xffffffff: # 양쪽 모두 노트가 존재함 # 1.",
"= '' for i in t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD",
"i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) *",
"False) # 리턴값 : 0 - 성공, 0 이외의 값 - 실패 #",
"# mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id - 압축 가능 엔진",
"if (no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종",
"return ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을 얻는다. #",
"# 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort()",
"MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return ord(ct[x]) def",
"기존 SBD link # add_num : 추가 SBD link 개수 # --------------------------------------------------------------------- def",
"# PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if",
"Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200",
"(내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의 경우",
"지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no,",
"def exists(self, name): for p in self.__full_list: if p['Name'] == name: return True",
"!= 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] !=",
"pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf = get_bblock(self.mm, n,",
"bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def",
"f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) #",
"할 XBBD 개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0: data",
"o.close() ''' # XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) #",
"링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big",
"(val & 0x1 == 0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext': (val",
"* 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에",
"여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize # 몇개의 블록이",
"i * 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block",
"# 특정 데이터를 big block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def",
"* self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에",
"bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize + (off",
"self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지",
"# Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정,",
"no = t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1]",
"* self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를",
"sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no in enumerate(sbd_list_array): data",
"is invalid.') # print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat,",
"(size / self.bsize) - 2 # 실제 마지막 Big Block 번호 n =",
"None self.bsize = None self.ssize = None self.bbd_list_array = None self.bbd = None",
"for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) *",
"크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm,",
"for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i]",
"1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' %",
"= (t_size / self.bsize) + (1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num)",
"org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 # 남은 링크는",
"self.bsize self.mm = self.mm[:off] + data + self.mm[off + self.bsize:] if __name__ ==",
"in range(seg): if next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize)",
"= pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else:",
"# 지원 완료 n = (len(data) / self.ssize) + (1 if (len(data) %",
"# raise error('Not Support : BBD -> BBD (Inc)') n = (len(data) /",
"None self.bbd = None self.bbd_fat = {} self.sbd = None self.root = None",
"OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block,",
"= None self.__full_list = None self.init(buf) def init(self, buf): # OLE 주요 데이터",
"--------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) # node :",
"# Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정",
"+= '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기",
"완료 n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize) else",
"o.test() ''' # 늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True)",
"한개의 BBD에는 bsize / 4 개수만큼 Big Block을 담을 수 있음 b_num =",
"if p['Name'] == name: return True else: return False # --------------------------------------------------------------------- # 스트림을",
"class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x):",
"# 1. next 노드 값을 root로 보낸다. t_no = next_no else: # prev_no",
"= (old_num_bbd + b_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4)",
"list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347')",
"# t = kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에 BBD 등록하기",
": 기존 SBD link # add_num : 추가 SBD link 개수 # ---------------------------------------------------------------------",
"self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:]",
"# --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다. # 입력값",
"i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) #",
"& 0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str = ''",
"zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) #",
"# 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ =",
"BBD list도 추가 special_no += bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks,",
"109 seg = (t_idx / ((bsize / 4) - 1)) + (1 if",
"pps = self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if size >= 0x1000:",
"hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) * self.bsize)",
"임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start = None end = None",
"Tree 검증 if self.__valid_pps_tree() is False: return False if self.verbose: print kavutil.vprint('Property Storage')",
"존재함... return # 추가할 필요 없음 else: # 여유분이 부족함. 따라서 Root를 늘려야",
"last_no if n >= num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가",
"self.mm def write(self, no, data): # 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start']",
"/ self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 #",
"(Dec)') # 개발 완료 n = (len(data) / self.bsize) + (1 if (len(data)",
"xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 <<",
"# 이전 링크 수집하기 bbd = self.bbd for no in t_link: bbd =",
"kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' %",
"(bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4)",
"함수 # ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의",
"찾기 for i, pps in enumerate(self.pps): if pps['Prev'] == node or pps['Next'] ==",
"악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): #",
"이전 BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전 링크",
"구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd = ''",
"print ' ' + ('-' * 74) for p in self.pps: if p['Valid']",
"+ 1) * self.bsize) + (off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff'",
"+ self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1:",
"for i in t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기",
"+ 1) * self.bsize] off = (no + 1) * self.bsize self.mm =",
"self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD",
"연다 # --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스 #",
"(no + 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] #",
"def parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature')",
"# OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf !=",
"(True or False) # 리턴값 : 0 - 성공, 0 이외의 값 -",
"target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지? t = ow.delete(no) # 링크",
"= total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를 위한 헤더",
"delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name'] == name: no = p['Node']",
"t_data = t_data[:-4] + struct.pack('<L', last_no) off = (next_b + 1) * self.bsize",
"data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def",
"핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) # ole",
"+ buf[t_off + 4:] if pps_dir is not None: t_off = off +",
"= buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:] if pps_prev is not",
"노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no,",
"'w') for file_info in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if",
"index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block",
"p['Start'] == 0xffffffff else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print",
"size=len(data)) else: # raise error('Not Support : SBD -> SBD (Inc)') # 작업",
"값을 root로 보낸다. t_no = next_no else: # prev_no == 0xffffffff and next_no",
"no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link = [] for",
"'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read() except: data",
"def write_stream(self, name, data): for p in self.__full_list: if p['Name'] == name: no",
"# PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의",
"False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is False: return False if",
"BBD list 블록에 들어갈 수 있는 Big Block 개수 for no in special_no:",
"# OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def",
"fsize - rsize } ret['ff_attach'] = fileformat # HWP 인가? o = OleFile(filename)",
"0x4800) if not ch: continue else: # 0x3800 - 0x383F # the value",
"XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num)",
"엔진 ID # arc_name - 최종적으로 압축될 압축 파일 이름 # file_infos -",
"__get_pps_path(self, node=0, prefix=''): if node == 0: pps_name = '' name = prefix",
"root_list_array self.small_block = small_block def __get_root_node(self, node): # 해당 정보를 가진 root를 찾기",
"찾는다. free_link = [i for i, no in enumerate(bbd_link) if (no == 0xffffffff)]",
"% ((bsize / 4) - 1)) next_b = xbbd_start_block for i in range(seg):",
"bsize + (off * 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. # ---------------------------------------------------------------------",
"-1: raise Error('PPS name is invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의",
"1) * self.bsize if len(data) == self.bsize: self.mm = self.mm[:off] + data +",
"self.ssize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할 필요",
"link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block",
"(제작자, 버전, ...) # 리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self):",
"Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not",
"buf) if self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) #",
"size = pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat",
"# XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no,",
"arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True) # zfile =",
"info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값",
"pps_prev is not None: t_off = off + 0x44 buf = buf[:t_off] +",
"t_data[:-4] + struct.pack('<L', last_no) off = (next_b + 1) * self.bsize # t_data의",
"bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size",
"노드 값을 root로 보낸다. t_no = prev_no # 2. prev 노드 하위에 next가",
"t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block",
"== 2: # 유효한 PPS에 대한 삭제인지 확인 if reset_stream: size = target_pps['Size']",
"import os import sys import struct import types import kernel import kavutil #",
"0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat)",
"이전 링크 수집하기 bbd = self.bbd for no in t_link: bbd = bbd[:no*4]",
"--------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node",
"= fp.read() except: data = None return data # --------------------------------------------------------------------- # arcclose(self) #",
"크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num",
"* self.ssize # 추가해야 할 용량 add_big_num = (size / self.bsize) + (1",
"남은 링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff #",
"# filename_ex - 압축 파일 내부 파일 이름 # 리턴값 : {파일 포맷",
"# 필요한 블록 수 추가하기 # BBD 링크를 처음 생성하므로 이전 링크가 없다.",
"필요 없음 else: # 여유분이 부족함. 따라서 Root를 늘려야 함 size = num",
"(len(data) / self.ssize) + (1 if (len(data) % self.ssize) else 0) t_data =",
"* 0x80 elif del_info: buf = buf[:off] + '\\x00' * 0x80 + buf[off+0x80:]",
"t = ow.write(no, data) if t: self.init(t) # 새롭게 OLE 재로딩 # ---------------------------------------------------------------------",
"else: t_idx = idx - 109 seg = (t_idx / ((bsize / 4)",
"self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock)",
"in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n",
"------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # -------------------------------------------------------------------------",
"Error('PPS name is invalid.') # print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize,",
"# 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no +=",
"if self.pps[node]['Type'] != 5: # Stream만 저장 p = {'Node': node, 'Name': name[1:],",
"t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = ''",
"= MsiBase64Encode(ch - 0x4800) if not ch: continue else: # 0x3800 - 0x383F",
"= ((t_no + 1) * self.bsize) + (off * 4) self.mm = self.mm[:t_off]",
"ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을 얻는다. # 입력값",
"'viewtext': (val & 0x4 == 0x4)} except Error: pass o.close() return ret #",
"수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Small block",
"root로 보낸다. t_no = prev_no # 2. prev 노드 하위에 next가 없는 node를",
"t_link[i+1] data = struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4] + data",
"bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size >= len(data): # raise",
"vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다. #",
"b_num while True: if old_num_bbd + b_num > 109: t_num = (old_num_bbd +",
"kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if",
"파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname =",
"Block을 담을 수 있음 b_num = (add_num / (self.bsize/4)) + (1 if (add_num",
"if x > 63: return None return ord(ct[x]) def DecodeStreamName(name): wch = []",
"False # 파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True",
"link # add_num : 추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link,",
"p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size']",
"= t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1)",
"크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:]",
"설정하기 for i in t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로",
"있는지를 조사한다. fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize =",
"= (t_idx / ((bsize / 4) - 1)) + (1 if (t_idx %",
"bsize if fsize > rsize: fileformat = { # 포맷 정보를 담을 공간",
"# OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1)",
"self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] ==",
"- 109 seg = (t_idx / ((bsize / 4) - 1)) + (1",
"BBD를 모은다 bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4)",
"i, no in enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link",
"OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() '''",
"for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를",
"변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.ssize) + (1",
"얻는다. # 입력값 : filename - 파일 이름 # fileformat - 파일 포맷",
"0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat =",
"'' if size >= 0x1000: t_list = list(list_array) while len(t_list): s, e =",
"{'compress': (val & 0x1 == 0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext':",
"5: # Stream만 저장 p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p)",
"않기 위해 처리 f = [] if len(self.pps) == 0: # 분석된 PPS가",
"size : 설정 크기 # start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self,",
"# --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node == 0: pps_name = ''",
"= self.mm[:off] + t_data + self.mm[off + self.bsize:] # XBBD 생성하기 for i",
"filename - 파일 이름 # 리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def",
"# 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach']",
"self.__deep = None self.__full_list = None self.init(buf) def init(self, buf): # OLE 주요",
"# SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array,",
"# 압축 파일 내부의 파일 목록을 얻는다. # 입력값 : filename - 파일",
"x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num += x_num",
"# --------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라 데이터 쓰기 (내장) #",
"self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD 처리하기 if",
"얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000: #",
"Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no",
"e return start, end def read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size",
"print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps = [] for i in",
"링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i,",
"없음 # 잔여 개수 체크하기 last_no = (size / self.bsize) - 2 #",
"for i in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm +=",
"return [] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm)",
"o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA')",
"self.ssize) else 0) t_data = data + ('\\x00' * ((n*self.ssize) - len(data))) #",
"p['Prev'] == 0xffffffff else '%4d ' % p['Prev'] t += ' - '",
"# 더이상 오른쪽이 없으면 탐색 종료 break else: # 항상 오른쪽 노드가 큰",
"i * 2)) for ch in wch: if 0x3800 <= ch <= 0x4840:",
"self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] =",
"else: # 기존에는 SBD 사용 # raise error('Not Support : SBD -> BBD')",
"수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. # t_link =",
"next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif",
"kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) * self.bsize) + (off * 4)",
"# sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래",
"이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수",
"탐색 종료 break else: # 항상 오른쪽 노드가 큰 값임 no = pps['Next']",
"== 1 and delete_storage: # 유효한 스토리지? t = ow.delete(no) # 링크 삭제",
"0 # 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의",
"open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of",
"loop: if e == num_list.pop(0): break end = e break else: for i",
"스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list:",
"root = self.pps[0] r_size = root['Size'] r_no = root['Start'] # SBD 링크를 생성한다.",
"BBD List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num = b_num while True:",
"== 0: continue except IndexError: if (x & 0x90900000) == 0x90900000: # CVE-2003-0820",
"self.bsize) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) #",
"num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 else: # 여유분이",
"(old_num_bbd + b_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) /",
"(no + 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off +",
"개수 add_num += x_num b_num = (add_num / (self.bsize / 4)) + (1",
"읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은",
"for p in self.pps: if p['Valid'] is False: # 유효한 Tree가 아니면 다음",
"self.bsize) self.bbd_fat = {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd,",
"연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize)",
"elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제",
"repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t =",
"num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에",
"링크를 생성한다. sbd_link = [] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4))",
"self.mm = self.mm[:off] + t_data + self.mm[off + self.bsize:] # XBBD 생성하기 for",
"pps['Next'] == node or pps['Dir'] == node: return i def __get_max_node(self, node): #",
"return 0 # 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인",
"버전, ...) # 리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): #",
"ret_link = old_link + free_link[:add_num] # 최종 결과의 BBD 링크 t_link = old_link[-1:]",
"if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True # --------------------------------------------------------------------- # PPS",
"블록을 가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는",
"else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수",
"[i for i, no in enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link",
"# 플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다.",
"return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로",
"- len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize",
"링크 t_link = free_link[:add_num] # BBD에 링크 연결하기 for i in range(len(t_link)-1): no",
"0 - 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path,",
"self.mm[off + self.bsize:] if __name__ == '__main__': # import zlib # o =",
"OleFile(arc_name, write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info in",
"수 추가하기 # BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None,",
"ID # arc_name - 최종적으로 압축될 압축 파일 이름 # file_infos - 압축",
"n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat)",
"# --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for",
"filehandle, filename, filename_ex): ret = {} mm = filehandle # OLE 헤더와 동일",
"SBD -> SBD (Dec)') # 지원 완료 n = (len(data) / self.ssize) +",
"# arc_name - 최종적으로 압축될 압축 파일 이름 # file_infos - 압축 대상",
"# 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된 SBD",
"링크 연결하기 for i in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no)",
"t_num = (total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) /",
"= 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose:",
"= (no + 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:]",
"vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일",
"num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list 개수가 109보다 크면 xbbd를 가져와야",
"# BBD를 배열로 바꾸기 t_link = [] for i in range(len(self.bbd) / 4):",
"t_num = len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록",
"이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError: break return ret # ---------------------------------------------------------------------",
"블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. # t_link",
"None if not start: start = num_list.pop(0) e = start loop = False",
"삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb,",
"self.bsize:(i + 1) * self.bsize] off = (no + 1) * self.bsize self.mm",
"if len(self.pps) == 0: # 분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir']",
"출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks",
"None end = None if not start: start = num_list.pop(0) e = start",
"스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for p in self.__full_list:",
"range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link =",
"설정하기 for i in t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로",
"sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self, node):",
"* num # 실제 필요한 데이터 블록 self.mm += attach_data else: special_no =",
"elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지? t =",
"if old_b_num == b_num: break else: old_b_num = b_num total_bbd_num = old_num_bbd +",
"== b_num: break else: old_b_num = b_num total_bbd_num = old_num_bbd + b_num #",
"= get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True #",
"사용 if org_size >= len(data): # raise error('Not Support : BBD -> BBD",
"크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize # 몇개의 블록이 필요한가?",
"Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list : 기존 Small",
"get_bblock(self.mm, no, self.bsize) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd,",
"__set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4]",
"charecter can be decoded ch = MsiBase64Encode(ch - 0x4800) if not ch: continue",
"in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data,",
"bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link = [i for i,",
"self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass",
"필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD 링크를 처음 생성하므로 이전",
"self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0: #",
"in self.handle: # 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile",
"0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if",
"이외의 값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진",
"self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기",
"계산한다. BBD List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num = b_num while",
"data + self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는",
"# 파일 포맷을 분석한다. # 입력값 : filehandle - 파일 핸들 # filename",
"SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기",
"i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return",
"(1 if (t_num % ((self.bsize - 4) / 4)) else 0) x_num =",
"%-4s %-4s %-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir',",
"- 파일 핸들 # filename - 파일 이름 # filename_ex - 압축 파일",
"= [] b_data = '\\xff' * self.bsize * b_num for i in range(b_num):",
"SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = '' for i in t_link: self.sbd",
"# --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'):",
"no = t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no +",
"in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는",
"get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d'",
"# 여유분이 부족함. 따라서 Root를 늘려야 함 size = num * self.ssize #",
"Big Block 개수 for no in special_no: seg = no / bb_num off",
"off = (self.small_block[n / 8] + 1) * self.bsize off += (n %",
"# print hex(t) # BBD List에 BBD 등록하기 for i, no in enumerate(bbd_no):",
"알려준다. # 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한",
"# --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm,",
"ret: # 이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError: break return ret",
"# PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의",
"(Inc)') # 작업 완료 n = (len(data) / self.ssize) + (1 if (len(data)",
"검사 대상 정보를 모두 가짐 # 미리 분석된 파일 포맷중에 OLE 파일 포맷이",
"1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if",
"값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화",
"%-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size')",
"4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for i in",
"t = ow.delete(no) # 링크 삭제 if t: self.init(t) # 새롭게 OLE 재로딩",
"prev_no == 0xffffffff and next_no == 0xffffffff: # 단일 노드 # 1. 0xffffffff",
"# 이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError: break return ret #",
"존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p in self.__full_list: if p['Name'] ==",
"org_size >= 0x1000: # 기존에는 BBD 사용 # raise error('Not Support : BBD",
"'_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name']",
"t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and",
"스트림의 크기를 조정한다. (내장) # node : PPS 인덱스 # size : 설정",
"# 추가할 필요 없음 # 잔여 개수 체크하기 last_no = (size / self.bsize)",
"MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str = '' for ch in och:",
"add_big_num) # 이전 링크에 필요한 블록 수 추가하여 링크를 새롭게 생성 # Root",
"num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None,",
"struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else: data =",
"buf[off+0x80:] if size is not None: t_off = off + 0x78 buf =",
"off = (no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를",
"special_no = [] # 특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야",
"not start: start = num_list.pop(0) e = start loop = False for x",
"BBD 추가하기 bbd_no = [] b_data = '\\xff' * self.bsize * b_num for",
"def get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b = no if",
"'Next', 'Dir', 'SB', 'Size') print ' ' + ('-' * 74) for p",
"이전 링크 수집하기 t_num = 0 if (len(t_link) * self.ssize) < len(t_data): #",
"self.parent.mm[off:off + self.read_size * (e - s + 1)] else: for n in",
"# 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize # 몇개의",
"= old_num_bbd + b_num # 전체 BBD list 개수 self.mm = self.mm[:0x2c] +",
"* bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # ---------------------------------------------------------------------",
"-1 if no == -1: raise Error('PPS name(%s) is invalid.' % name) #",
"함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize)",
"range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD",
"self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no != 0xffffffff and next_no != 0xffffffff:",
"self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link # 연결된 링크",
"error('Not Support : BBD -> SBD') # 섹터가 변화는 것은 Dec, Inc가 의미",
"if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 #",
"bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm",
"# , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname",
"name(%s) is invalid.' % name) # self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps,",
"!= (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함",
"not None: t_off = off + 0x78 buf = buf[:t_off] + struct.pack('<L', size)",
"'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize",
"def getinfo(self): # 플러그인 엔진의 주요 정보 info = dict() # 사전형 변수",
"if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self):",
"xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def",
"분석 정보 # 리턴값 : [[압축 엔진 ID, 압축된 파일 이름]] # ---------------------------------------------------------------------",
"= buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose:",
"if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = '' for i in range(num_of_bbd_blocks):",
"self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된 BBD 이미지 #",
"= kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료",
"= False # 파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile =",
"크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : BBD -> BBD",
"struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print buf =",
"* 0x80 + buf[off+0x80:] if size is not None: t_off = off +",
"# 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.ssize) < len(t_data):",
"no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print",
"0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는",
"+ self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data +",
"print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() ''' # XBBD 늘어나는 경우",
"kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) * bsize if fsize > rsize:",
"def format(self, filehandle, filename, filename_ex): ret = {} mm = filehandle # OLE",
"'Prev', 'Next', 'Dir', 'SB', 'Size') print ' ' + ('-' * 74) for",
"kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None,",
"위치 self.mm = self.mm[:off] + t_data + self.mm[off + self.bsize:] # XBBD 생성하기",
"och.append(ch) ret_str = '' for ch in och: ret_str += struct.pack('<H', ch) #",
"self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) # print repr(t)",
"= kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) * self.bsize) + (off *",
"off + 0x78 buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:]",
"buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks",
"bbd = self.bbd for no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' +",
"IndexError: pass # small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose:",
"# num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx",
"self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d'",
"/ 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]]",
"+ bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' +",
"t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link #",
"----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent, node):",
"링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat",
"else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] =",
"링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _,",
"range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block",
"def delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no",
"self.bsize % 0x200 != 0 or self.ssize != 0x40: # 이상 파일 정보",
"BBD link 추가 요청한다. (원본 이미지의 BBD link가 수정 됨) # old_link :",
"self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb,",
"% num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks',",
"self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장)",
"(n % 8) * self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i +",
"'\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래",
"KeyError: break return ret # --------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def",
"= start loop = False for x in num_list: if e + 1",
"링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩 Overwrite",
"range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link =",
"# SBD를 수정한다. # sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self,",
"no, data): off = (no + 1) * self.bsize if len(data) == self.bsize:",
"False self.bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd",
"# --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size = root['Size'] r_no =",
"def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c)",
"= e return start, end def read(self): pps = self.parent.pps[self.node] sb = pps['Start']",
"t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else: # 이전 링크가",
"'__main__': # import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6',",
"self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no,",
"= None self.pps = None self.small_block = None self.root_list_array = None self.exploit =",
"if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 #",
"삭제인지 확인 if reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00' * size)",
"Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1),",
"입력값 : filehandle - 파일 핸들 # filename - 파일 이름 # filename_ex",
"0x40) if t_size != 0: # 출력시 이름이 깨질 가능성이 큼 if ord(pps[0])",
"total_bbd_num > 109: t_num = (total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize",
"add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의 BBD",
"1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue except IndexError: if",
"size = (len(self.mm) / self.bsize) * self.bsize # 파일 크기 self.mm = self.mm[:size]",
"# --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not",
"# 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 #",
"self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def",
"= '' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm,",
"# Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정,",
"seg = (t_idx / ((bsize / 4) - 1)) + (1 if (t_idx",
"sbd[(no + 1) * 4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다. n",
"off + 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:]",
"= kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i * self.bsize:(i + 1) *",
"# print (\"%02d : %d %s\") % (node, self.deep, name) # if self.pps[node]['Type']",
"num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석",
"x > 63: return None return ord(ct[x]) def DecodeStreamName(name): wch = [] och",
"print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks =",
"t_size = len(t_data) - (len(t_link) * self.bsize) t_num = (t_size / self.bsize) +",
"# --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o =",
"아니면 다음 continue t = '' t += ' - ' if p['Prev']",
"except IndexError: pass # small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if",
"self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array =",
"def arclist(self, filename, fileformat): file_scan_list = [] # 검사 대상 정보를 모두 가짐",
"'\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no",
"# --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link): off =",
"+ (1 if (len(data) % self.bsize) else 0) t_data = data + ('\\x00'",
"ret_link = free_link[:add_num] # 최종 결과의 BBD 링크 t_link = free_link[:add_num] # BBD에",
"--------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = [] for p in self.__full_list: if",
"= o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP Document File': val =",
"!= 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return",
"IndexError: if (x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False",
"# 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if",
"+ data + self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- # PPS 헤더에",
"0, 80) return data[:size] def close(self): pass # ----------------------------------------------------------------- for p in self.__full_list:",
"pps in enumerate(self.pps): if pps['Prev'] == node or pps['Next'] == node or pps['Dir']",
"개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num < 0: return []",
"invalid.') # print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd,",
"old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) #",
"file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True) #",
"num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link):",
"Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print",
"링크 수집하기 t_num = 0 if (len(t_link) * self.bsize) < len(t_data): # 블록",
"in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를",
"== 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 BBD",
"/ self.bsize) + (1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한",
"False # --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): # -----------------------------------------------------------------",
"# XBBD 처리하기 if total_bbd_num > 109: t_num = (total_bbd_num - 109) total_xbbd_num",
"OLE의 BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks =",
"no = p['Node'] break else: no = -1 if no == -1: raise",
"for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return [] #",
"= self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm,",
"# ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화",
"False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev']",
"if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 #",
"= data + ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에",
"4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link = [i for",
"i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i * 4) data =",
"# 추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도 추가될 수 있기 때문에...",
"리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트",
"if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True) # zfile",
"d = pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var",
"= root['Start'] # SBD 링크를 생성한다. sbd_link = [] for i in range(len(self.sbd)",
"Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no,",
"+ b_data + add_data + attach_data # 특수 블록에 BBD list도 추가 special_no",
"if self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s %-8s %-8s",
"(no == 0xffffffff and i < size / self.bsize)] if len(free_link) >= num:",
"0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf)",
"= del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽",
"수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data,",
"# 사용하지 않는 BBD 링크를 찾는다. free_link = [i for i, no in",
"if p['Valid'] is False: # 유효한 Tree가 아니면 다음 continue t = ''",
"임시 변수 self.__deep = None self.__full_list = None self.init(buf) def init(self, buf): #",
"Big Block 번호 n = (len(self.bbd) / 4 - 1) - last_no if",
"else: while loop: if e == num_list.pop(0): break end = e break else:",
"보낸다. t_no = prev_no elif prev_no == 0xffffffff and next_no != 0xffffffff: #",
"대상 정보를 모두 가짐 # 미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가?",
"pps_next=next_no) elif prev_no != 0xffffffff and next_no == 0xffffffff: # Prev만 존재 #",
">= 0x1000: # 기존에는 BBD 사용 if org_size >= len(data): # raise error('Not",
"할 블록 수 add_data = ('\\x00' * self.bsize * add_num) # 추가해야 할",
"t_num = (t_size / self.bsize) + (1 if (t_size % self.bsize) else 0)",
"# ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass #",
"return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼",
"# raise error('Not Support : SBD -> SBD (Dec)') # 지원 완료 n",
"= len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self)",
"= get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no",
"continue else: while loop: if e == num_list.pop(0): break end = e break",
"처리함 special_no.append(last_no) # 특수 블록 등록 last_no += 1 # END of XBBD",
"return 0x4c + (idx * 4) else: t_idx = idx - 109 seg",
"ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c)",
"읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1) * bsize return",
"(Inc)') n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize) else",
"= num - n # 추가해야 할 블록 수 add_data = ('\\x00' *",
"= t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1)",
"필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list)",
"return True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def",
"self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t =",
"수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44]",
"list_array: div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n] + 1)",
"OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose =",
"write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile = False # 파일로 접근",
"= self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block",
"# 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인",
"+ ('-' * 74) for p in self.pps: if p['Valid'] is False: #",
"4)) + (1 if (add_num % (self.bsize / 4)) else 0) if old_b_num",
"데이터를 big block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data,",
"%d %s\") % (node, self.deep, name) # if self.pps[node]['Type'] != 5: # Stream만",
"--------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block",
"--------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret = {} mm = filehandle #",
"생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ =",
"add_num : 추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if",
"% p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s %d",
"Stream 목록 추출하기 o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return",
"'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print ' ' + ('-' *",
"% p['Next'] t += ' - ' if p['Dir'] == 0xffffffff else '%4d",
"= (no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다.",
"t_link): for i, n in enumerate(t_link): off = (n + 1) * self.bsize",
"buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print",
"# --------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no + 1) * self.bsize",
"Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm,",
"self.verbose = verbose self.mm = mm self.pps = pps self.bsize = bsize self.ssize",
"in self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type'] == 1",
"= {} for i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i]",
"bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용",
"the value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch =",
"배열을 SBD 버퍼로 바꾸기 self.sbd = '' for i in t_link: self.sbd +=",
"info['version'] = '1.1' # 버전 info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name']",
"스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent =",
"for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in",
"# 개발 완료 n = (len(data) / self.bsize) + (1 if (len(data) %",
"따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n",
"SBD (Dec)') # 지원 완료 n = (len(data) / self.ssize) + (1 if",
"작성한거라 최적화 필요함 def get_liner_value(self, num_list): start = None end = None if",
"# d2 = pics.read() o.close() ''' # XBBD 늘어나는 경우 # o =",
"no == -1: raise Error('PPS name is invalid.') return Stream(self, no) # ---------------------------------------------------------------------",
"self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- #",
"bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD",
"if len(data) >= 0x1000: # BBD를 사용한다. if org_size >= 0x1000: # 기존에는",
"self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기",
"= 0xffffffff # root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no:",
"if (add_num % (self.bsize / 4)) else 0) if old_b_num == b_num: break",
"'wb').write(bbd) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) #",
"# 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.bsize) < len(t_data):",
"0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array",
"최종적으로 압축될 압축 파일 이름 # file_infos - 압축 대상 파일 정보 구조체",
"- 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False):",
"return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call') #",
"f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff",
"Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0])",
"next_no != 0xffffffff: # Next만 존재 # 1. next 노드 값을 root로 보낸다.",
"연결하기 for i in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no",
"self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd",
"kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' %",
"목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data = '' #",
"# 유효한 스토리지? t = ow.delete(no) # 링크 삭제 if t: self.init(t) #",
"struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm,",
"수집하기 t_num = 0 if (len(t_link) * self.ssize) < len(t_data): # 블록 추가해야",
"''' print ' %-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s' % ('No',",
"BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf,",
"파일 목록을 얻는다. # 입력값 : filename - 파일 이름 # fileformat -",
"진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self,",
"# 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize # 몇개의",
"unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진 ID #",
"None self.small_block = None self.root_list_array = None self.exploit = [] # 취약점 존재",
"True # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self,",
"= OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') #",
"struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no",
"잔여 개수 체크하기 last_no = (size / self.bsize) - 2 # 실제 마지막",
"SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n",
"if not ch: continue else: # 0x3800 - 0x383F # the value contains",
"SBD 사용 if org_size >= len(data): # raise error('Not Support : SBD ->",
": 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트 vlist",
"next_no != 0xffffffff: # 양쪽 모두 노트가 존재함 # 1. prev 노드 값을",
"SBD link가 수정 됨) # old_link : 기존 SBD link # add_num :",
"no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: #",
"- 2 # 실제 마지막 Big Block 번호 n = (len(self.bbd) / 4",
"대상 파일 정보 구조체 # 리턴값 : 압축 성공 여부 (True or False)",
"/ 4) - 1)) next_b = xbbd_start_block for i in range(seg): if next_b",
"be decoded ch = MsiBase64Encode(ch - 0x4800) if not ch: continue else: #",
"= p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s %d %22s %8d' %",
"t_link = free_link[:add_num] # BBD에 링크 연결하기 for i in range(len(t_link)-1): no =",
"# 특정 데이터를 small block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def",
"+ 0x78 buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:] if",
"* ((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link",
"__modify_small_block_link(self, old_link, add_num): if add_num < 0: return [] sbd = self.sbd if",
"kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' %",
= 0x4800: # 0x4800">
">= 0x4800: # 0x4800 - 0x483F # only one character can be decoded",
"info = dict() # 사전형 변수 선언 info['author'] = '<NAME>' # 제작자 info['version']",
"False while len(f): x = f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type']",
"self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir'])",
"off = (n + 1) * self.bsize self.mm = self.mm[:off] + t_data[i *",
"# --------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라 데이터 쓰기 (내장) #",
"데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i, n in",
"p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps,",
"BBD 링크 t_link = free_link[:add_num] # BBD에 링크 연결하기 for i in range(len(t_link)-1):",
"t_no, self.bsize) # print repr(t) # t = kavutil.get_uint32(t, off*4) # print hex(t)",
"0, 0x60) print if self.bsize % 0x200 != 0 or self.ssize != 0x40:",
"bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지",
"return ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no,",
"+ (1 if (size % self.bsize) else 0) # 추가해야 할 Big Block",
"get_bblock(self.mm, n, self.bsize) off = ((node % 4) * 0x80) if del_info and",
"--------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx):",
"f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff",
"% num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps = [] for",
"p['Size']) # PPS 전체 경로 구하기 self.__deep = 0 self.__full_list = [] try:",
"hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) *",
"d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() '''",
"self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no == 0xffffffff: # Prev만 존재",
"data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off]",
"1)) + (1 if (t_idx % ((bsize / 4) - 1)) else 0)",
"bbd # 체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i in",
"5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0: # 정상적인 PPS가",
"Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff,",
"# 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.ssize) t_num =",
"return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node:",
"- 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle",
"self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root,",
"self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) /",
"xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf,",
"포맷을 분석한다. # 입력값 : filehandle - 파일 핸들 # filename - 파일",
"get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에",
"t_data, t_link): for i, n in enumerate(t_link): off = (self.small_block[n / 8] +",
"= '' add_num = num - n # 추가해야 할 블록 수 add_data",
"- 1)) next_b = xbbd_start_block for i in range(seg): if next_b == 0xfffffffe:",
"링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link",
"in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if",
"== 0x4)} except Error: pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename)",
": [[압축 엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat):",
"PPS 전체 경로 구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node",
"0으로 Wipe t = ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩 elif",
"self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf",
"성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): #",
"last_no = (size / self.bsize) - 2 # 실제 마지막 Big Block 번호",
"큰 값임 no = pps['Next'] return no def delete(self, del_no): del_pps = self.pps[del_no]",
"'0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return ord(ct[x]) def DecodeStreamName(name): wch =",
"total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를 위한 헤더 수정",
"num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 # 잔여 개수",
"+ b_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4))",
"엔진 오류 메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64",
"scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else:",
"+ 1) * bsize + (off * 4) # --------------------------------------------------------------------- # OLE 파일인지",
"self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라",
">= len(data): # raise error('Not Support : SBD -> SBD (Dec)') # 지원",
"last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else: data = struct.pack('<L',",
"'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks)",
"# root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no)",
"= (no + 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off",
"t_idx = idx - 109 seg = (t_idx / ((bsize / 4) -",
"가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) == 0x00: name",
"get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i * 4)",
"+ data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] +",
"continue except IndexError: if (x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820')",
"!= 0xffffffff and next_no == 0xffffffff: # Prev만 존재 # 1. prev 노드",
"xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109:",
"(no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. #",
"%-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print",
"else '%4d ' % p['Next'] t += ' - ' if p['Dir'] ==",
"for p in self.pps: print ' ' + '%2d %-23s %d %8X %8X",
"0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next",
"write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos:",
"name) # self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat,",
"'\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS",
"할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link =",
"-1: raise Error('PPS name is invalid.') # print no ow = OleWriteStream(self.mm, self.pps,",
"데이터를 0으로 Wipe t = ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩",
"else '%4d ' % p['Dir'] t += ' - ' if p['Start'] ==",
"# OLE의 BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks",
"('\\x00' * self.bsize * add_num) # 추가해야 할 BBD list 개수는 한개의 BBD에는",
"prev_no # 2. prev 노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다.",
"+ t_data[i * self.ssize:(i + 1) * self.ssize] + self.mm[off + self.ssize:] #",
"= 0 self.__full_list = [] self.parse() # OLE 파일을 분석 def close(self): if",
"p['Next'] == 0xffffffff else '%4d ' % p['Next'] t += ' - '",
"%-2s %-32s %4s %-4s %-4s %-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev',",
"t_num = len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록",
"= get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in sbd_list_array: self.sbd += get_bblock(self.mm,",
"OLE 파일을 분석 def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) #",
"/ 4] buf = get_bblock(self.mm, n, self.bsize) off = ((node % 4) *",
"'replace') print ' ' + '%2d %-35s %d %22s %8d' % (self.pps.index(p), tname,",
"x_data + b_data + add_data + attach_data # 특수 블록에 BBD list도 추가",
"0x3800 - 0x383F # the value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch",
"self.ssize) t_num = (t_size / self.ssize) + (1 if (t_size % self.ssize) else",
"0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 SBD 링크",
"# 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys():",
"# Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정",
"정의 # ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 #",
"추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n)",
"self.root = None self.pps = None self.small_block = None self.root_list_array = None self.exploit",
"--------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size = root['Size'] r_no = root['Start']",
"# 임시 변수 self.__deep = None self.__full_list = None self.init(buf) def init(self, buf):",
"파일 내부 파일 이름 # 리턴값 : {파일 포맷 분석 정보} or None",
"else: no = -1 if no == -1: raise Error('PPS name is invalid.')",
"for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print",
"연속된 숫자 값을 리턴한다. # TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self,",
"self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start",
"= kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에 BBD 등록하기 for i,",
"-*- coding:utf-8 -*- # Author: <NAME>(<EMAIL>) import os import sys import struct import",
"0x78) p['Valid'] = False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 #",
"== self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return True return False",
"(len(data) % self.ssize) else 0) t_data = data + ('\\x00' * ((n*self.ssize) -",
"open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False",
"= d + d o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o =",
"'\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize =",
"for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i",
"scaned_pps_node = [0] # 이미 분석한 노드의 경우 더이상 분석하지 않기 위해 처리",
"ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p",
"+= t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # ---------------------------------------------------------------------",
"t_data[i * self.ssize:(i + 1) * self.ssize] + self.mm[off + self.ssize:] # ---------------------------------------------------------------------",
"raise Error('Not Ole signature') # big block, small bloc 크기 구하기 self.bsize =",
"!!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4):",
"재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지? t",
"t_no = next_no else: # prev_no == 0xffffffff and next_no == 0xffffffff: #",
"= file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as fp:",
"next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data",
"fat[next_b] if next_b == 0xfffffffe: break if len(ret) % 10000 == 0: if",
"/ 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40)",
"새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) #",
"node): self.parent = parent self.node = node self.read_size = 0 self.fat = None",
"get_liner_value(self, num_list): start = None end = None if not start: start =",
"for i in t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock",
"= n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' %",
"# small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small",
"모든 데이터를 0으로 Wipe t = ow.delete(no) if t: self.init(t) # 새롭게 OLE",
"'Prev', 'Next', ' Dir', 'SB', 'Size') print ' ' + ('-' * 74)",
": PPS 인덱스 # size : 설정 크기 # start : 시작 링크",
"- s + 1)] else: for n in list_array: div_n = self.parent.bsize /",
"--------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 #",
"# --------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle: # 이전에 열린 핸들이",
"filename - 파일 이름 # fileformat - 파일 포맷 분석 정보 # 리턴값",
">= num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 # 잔여",
"self.fp.read() else: buf = input_data else: raise Error('Input data is invalid.') # 수정",
"재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info #",
"r_size / self.ssize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return #",
"pps = self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료",
"None return data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다. #",
"(next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return",
"필요한 블록 수 추가하기 # SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link",
"찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff",
"try: buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError:",
"4)) else 0) if old_b_num == b_num: break else: old_b_num = b_num total_bbd_num",
"fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진 ID # arc_name - 압축",
"t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c)",
"# 엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK",
"if self.bsize % 0x200 != 0 or self.ssize != 0x40: # 이상 파일",
"self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] =",
"읽기 self.pps = [] for i in range(len(self.root) / 0x80): p = {}",
"num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다. #",
"# BBD link 추가 요청한다. (원본 이미지의 BBD link가 수정 됨) # old_link",
"= ow.write(no, data) if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- #",
"i in t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd",
"% (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks",
"verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- #",
"t_off = ((t_no + 1) * self.bsize) + (off * 4) self.mm =",
"{} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0:",
"크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두",
"[] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id -",
"True continue else: while loop: if e == num_list.pop(0): break end = e",
"while len(f): x = f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] !=",
"if p['Next'] == 0xffffffff else '%4d ' % p['Next'] t += ' -",
"Support : SBD -> SBD (Inc)') # 작업 완료 n = (len(data) /",
"i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if",
"__add_big_block_num(self, num): size = (len(self.mm) / self.bsize) * self.bsize # 파일 크기 self.mm",
"size is not None: t_off = off + 0x78 buf = buf[:t_off] +",
"self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다.",
"4 개수만큼 Big Block을 담을 수 있음 b_num = (add_num / (self.bsize/4)) +",
"b_num total_bbd_num = old_num_bbd + b_num # 전체 BBD list 개수 self.mm =",
"sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no * 4]",
"# 디버깅용 self.isfile = False # 파일로 접근 중인가? if isinstance(input_data, types.StringType): if",
"# 리턴값 : 압축 해제된 내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id,",
"# 리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename",
"n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock)",
"0 self.ssize = 0 # 임시 변수 self.__deep = 0 self.__full_list = []",
"get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i",
"if len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할 필요 없음",
"== del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir",
"109: t_num = (old_num_bbd + b_num - 109) total_xbbd_num = (t_num / ((self.bsize",
"% (self.bsize / 4)) else 0) if old_b_num == b_num: break else: old_b_num",
"= self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = '' if",
"else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. #",
"%-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print '",
"[] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD",
"구조체 # 리턴값 : 압축 성공 여부 (True or False) # --------------------------------------------------------------------- def",
"True self.fname = input_data self.fp = open(input_data, 'rb') buf = self.fp.read() else: buf",
"in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else:",
"raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list : 기존",
"t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe",
"for i in t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기 t,",
"마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) #",
"수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # BBD를",
"{} self.sbd = None self.root = None self.pps = None self.small_block = None",
"= sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off] +",
"class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화 한다. #",
"치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return",
"+ (1 if (add_num % (self.bsize / 4)) else 0) if old_b_num ==",
"file_scan_list = [] # 검사 대상 정보를 모두 가짐 # 미리 분석된 파일",
"노드가 큰 값임 no = pps['Next'] return no def delete(self, del_no): del_pps =",
"num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize",
"if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass #",
"정보를 가진 root를 찾기 for i, pps in enumerate(self.pps): if pps['Prev'] == node",
"+ data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된",
"- 디버그 모드 (True or False) # 리턴값 : 0 - 성공, 0",
"+ struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir is not None: t_off",
"kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = '' for",
"4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print",
"self.bsize) # print repr(t) # t = kavutil.get_uint32(t, off*4) # print hex(t) #",
"open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크",
"= kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] =",
"* t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link",
"OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd,",
"kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가 필요하다",
"0) if old_b_num == b_num: break else: old_b_num = b_num total_bbd_num = old_num_bbd",
"+ self.mm[0x30:] last_no += 1 # XBBD 처리하기 if total_bbd_num > 109: t_num",
"f = [] if len(self.pps) == 0: # 분석된 PPS가 없으면 종료 return",
"요청한다. (원본 이미지의 BBD link가 수정 됨) # old_link : 기존 BBD link",
"경로 구하기 self.__deep = 0 self.__full_list = [] try: self.__get_pps_path() except IndexError: pass",
"verbose=True) # o.test() ''' # 늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp',",
"> num_link: # SBD를 배열로 바꾸기 t_link = [] for i in range(len(self.sbd)",
"= xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array +=",
"self.bbd_fat) # print sbd_list_array for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off",
": 추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num",
"# add_num : 추가 SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num):",
"= self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = '' if size >= 0x1000:",
"== 0x180: buf = buf[:off] + '\\x00' * 0x80 elif del_info: buf =",
"링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의",
"= [] for i in range(len(self.root) / 0x80): p = {} pps =",
"설명 info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드",
"0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기 # pps에",
"bsize / 4 개수만큼 Big Block을 담을 수 있음 b_num = (add_num /",
"= get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat)",
"read_size = self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb, fat) ''' #",
"n) # 필요한 개수로 링크 줄이기 # Big block 영역에 bsize 만큼씩 Overwrite",
"전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent = parent",
"last_no += 1 # 최종 조합 self.mm += x_data + b_data + add_data",
"덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for p in self.__full_list: if p['Name']",
"== 'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name,",
"arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read()",
"enumerate(t_link): off = (self.small_block[n / 8] + 1) * self.bsize off += (n",
"찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no != 0xffffffff and",
"buf) else: # 삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close()",
"전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다",
"(원본 이미지의 BBD link가 수정 됨) # old_link : 기존 BBD link #",
"enumerate(sbd_link) if (no == 0xffffffff and i < r_size / self.ssize)] if len(free_link)",
"0: # 정상적인 PPS가 없음 return False while len(f): x = f.pop(0) try:",
"+ self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. # num :",
"0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체 경로 구하기",
"% ((self.bsize - 4) / 4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks",
"= \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in range(len(t) / 4):",
"마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no += 1",
"0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True",
"참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E',",
"pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff,",
"값을 root로 보낸다. t_no = 0xffffffff # root 노드를 수정한다. pps = self.pps[root_no]",
"--------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의 BBD link가 수정 됨) #",
"size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf =",
"= OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP",
"추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00'",
"self.sbd_fat = {} for i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4)",
"len(f): x = f.pop(0) try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2",
"if e == num_list.pop(0): break end = e break else: for i in",
"# 제작자 info['version'] = '1.1' # 버전 info['title'] = 'OLE Library' # 엔진",
"len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big",
"and i < r_size / self.ssize)] if len(free_link) >= num: # 여유분이 충분히",
"% num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list 개수가 109보다 크면 xbbd를",
"KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값",
"''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = '' for i in",
"ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat):",
"4) * 0x80) if del_info and off == 0x180: buf = buf[:off] +",
"# 플러그인 엔진 초기화 self.handle = {} self.verbose = verbose return 0 #",
"rsize: fileformat = { # 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize",
"if org_size >= len(data): # raise error('Not Support : BBD -> BBD (Dec)')",
"no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm",
"# --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) # node",
"kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big",
"파일의 핸들을 얻는다. # 입력값 : filename - 파일 이름 # 리턴값 :",
"zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile return zfile",
"t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link, add_big_num) #",
"kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함",
"else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] =",
"- n # 추가해야 할 블록 수 add_data = ('\\x00' * self.bsize *",
"클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을",
"* ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 # t_link =",
"값을 리턴한다. # TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start",
"if no == -1: raise Error('PPS name(%s) is invalid.' % name) # self.init(self.mm)",
"- 1)) + (1 if (t_idx % ((bsize / 4) - 1)) else",
"요청한다. (원본 이미지의 SBD link가 수정 됨) # old_link : 기존 SBD link",
"(\"%02d : %d %s\") % (node, self.deep, name) # if self.pps[node]['Type'] != 5:",
"포맷이 있는가? if 'ff_ole' in fileformat: try: # OLE Stream 목록 추출하기 o",
"모드 self.write_mode = write_mode # OLE 주요 데이터 self.mm = None self.bsize =",
"[] for p in self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name']) elif",
"del_info=True) return self.mm def write(self, no, data): # 기존 PPS 정보를 얻는다 org_sb",
": SBD -> SBD (Dec)') # 지원 완료 n = (len(data) / self.ssize)",
"== name: no = p['Node'] break else: no = -1 if no ==",
"e = start loop = False for x in num_list: if e +",
"= old_link + free_link[:add_num] # 최종 결과의 BBD 링크 t_link = old_link[-1:] +",
"i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link = [] for i in range(len(bbd)",
"i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list",
"= '1.1' # 버전 info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name'] =",
"self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 # raise error('Not Support : SBD",
"다음 블록을 가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의",
"sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if",
"if p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type'] == 1 and storages:",
"r_no = root['Start'] # SBD 링크를 생성한다. sbd_link = [] for i in",
"0xffffffff and next_no == 0xffffffff: # Prev만 존재 # 1. prev 노드 값을",
"''' if org_size >= 0x1000: # read_size = self.bsize fat = self.bbd else:",
"# print self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO : 임시로 작성한거라",
"mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose =",
"0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir",
"# Prev만 존재 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no",
"0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next']",
"0, 0x80) # PPS 읽기 self.pps = [] for i in range(len(self.root) /",
"end = e return start, end def read(self): pps = self.parent.pps[self.node] sb =",
"파일 포맷이 있는가? if 'ff_ole' in fileformat: try: # OLE Stream 목록 추출하기",
"--------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p in self.__full_list:",
"break return ret # --------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf,",
"= OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read()",
"수집하기 t_num = 0 if (len(t_link) * self.bsize) < len(t_data): # 블록 추가해야",
"/ 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i",
"ret['ff_hwp'] = {'compress': (val & 0x1 == 0x1), 'encrypt': (val & 0x2 ==",
"attach_data = self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터 # 전체 BBD",
"None self.root_list_array = None self.exploit = [] # 취약점 존재 여부 # 임시",
"= pics.read() d = d + d o.write_stream('FileHeader', d) o.close() ''' ''' #",
"write(self, no, data): # 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size =",
"# KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path) #",
"''' # 상당히 많은 데이터가 출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks",
"처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에",
"4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] =",
"이전 링크에 필요한 블록 수 추가하여 링크를 새롭게 생성 # Root 크기 수정",
"= old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else: # 이전 링크가 없다면...",
"verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next",
"bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no,",
"> 109: t_num = (old_num_bbd + b_num - 109) total_xbbd_num = (t_num /",
"= (no + 1) * self.bsize if len(data) == self.bsize: self.mm = self.mm[:off]",
"if ch >= 0x4800: # 0x4800 - 0x483F # only one charecter can",
"rsize = (fsize / bsize) * bsize if fsize > rsize: fileformat =",
"Block Size', '%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize %",
"수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. # t_link =",
"start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data): # 기존",
"* 4) data = bbd[i * self.bsize:(i + 1) * self.bsize] off =",
"%8s' % ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print '",
"Tree가 아니면 다음 continue t = '' t += ' - ' if",
"= kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None,",
"if next_b == 0xfffffffe: break if len(ret) % 10000 == 0: if next_b",
"# read_size = self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb, fat) '''",
"= (size / self.bsize) + (1 if (size % self.bsize) else 0) #",
"= '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48)",
"수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd)",
"__modify_bbd(self, bbd): self.bbd = bbd # 체크 !!! bbd_list_array, _, _, _ =",
"'Size') print ' ' + ('-' * 74) for p in self.pps: print",
"bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm = mm self.pps",
"och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str # ---------------------------------------------------------------------",
"fileformat) # 압축 파일 내부의 파일 목록을 얻는다. # 입력값 : filename -",
"-> BBD (Dec)') # 개발 완료 n = (len(data) / self.bsize) + (1",
"> rsize: fileformat = { # 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size':",
"self.read_size * (e - s + 1)] else: for n in list_array: div_n",
"t = ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and",
"특정 위치에 1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data):",
"# --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n =",
"체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의",
"name]) return file_scan_list except: pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name,",
"in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm += x_data +",
"Small block 갱신 self.bbd_fat = {} for i in range(len(self.bbd) / 4): n",
"total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num)",
"print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c,",
"# 플러그인 엔진을 종료한다. # 리턴값 : 0 - 성공, 0 이외의 값",
"return ord(ct[x]) def DecodeStreamName(name): wch = [] och = [] for i in",
"if total_bbd_num > 109: t_num = (total_bbd_num - 109) total_xbbd_num = (t_num /",
"free_link[:add_num] # BBD에 링크 연결하기 for i in range(len(t_link)-1): no = t_link[i+1] data",
"for i, n in enumerate(t_link): off = (self.small_block[n / 8] + 1) *",
"bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm += x_data + b_data +",
"기존 Small block 링크 # num_link : 필요로 하는 전체 링크 수 #",
"last_no += 1 # XBBD 처리하기 if total_bbd_num > 109: t_num = (total_bbd_num",
"name: return True else: return False # --------------------------------------------------------------------- # 스트림을 연다 # ---------------------------------------------------------------------",
"+= self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size]",
"4:] self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0,",
"개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00' *",
"# BBD 추가하기 bbd_no = [] b_data = '\\xff' * self.bsize * b_num",
"= [] # 특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함",
"= fileformat # HWP 인가? o = OleFile(filename) try: pics = o.openstream('FileHeader') d",
"kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link = [] for i in",
"listdir(self, streams=True, storages=False): ret = [] for p in self.__full_list: if p['Type'] ==",
"range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm += x_data + b_data",
"' - ' if p['Next'] == 0xffffffff else '%4d ' % p['Next'] t",
"= verbose self.mm = mm self.pps = pps self.bsize = bsize self.ssize =",
"arcclose(self) # 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in",
"name: no = p['Node'] break else: no = -1 if no == -1:",
"' if p['Start'] == 0xffffffff else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding,",
"o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass",
"--------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기 (내장) # ---------------------------------------------------------------------",
"# 최종 조합 self.mm += x_data + b_data + add_data + attach_data #",
"수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d =",
"특정 노드의 Max 값을 가진 node를 찾기 no = node while True: pps",
"self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff:",
"bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if self.verbose:",
"and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue except IndexError: if (x",
"i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose)",
"in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b",
"'%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list 개수가 109보다 크면",
"# 이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile = OleFile(filename,",
"> 109: # bbd list 개수가 109보다 크면 xbbd를 가져와야 함 next_b =",
"--------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) #",
"-1: raise Error('PPS name(%s) is invalid.' % name) # self.init(self.mm) # return ow",
"self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200)",
"d2 = pics.read() o.close() ''' # XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt',",
"--------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 : 0 - 성공,",
"root로 보낸다. t_no = next_no else: # prev_no == 0xffffffff and next_no ==",
"filename_ex - 압축 파일 내부 파일 이름 # 리턴값 : {파일 포맷 분석",
"else: # 삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close() #",
"in range(len(num_list)): num_list.pop(0) end = e return start, end def read(self): pps =",
"bsize self.ssize = ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd = sbd",
"self.pps[0] r_size = root['Size'] r_no = root['Start'] # SBD 링크를 생성한다. sbd_link =",
"# Stream만 저장 p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if",
"no = -1 if no == -1: raise Error('PPS name(%s) is invalid.' %",
"if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev'])",
"= 'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일 이름",
"원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd)",
"%-32s %4s %-4s %-4s %-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next',",
"= False for x in num_list: if e + 1 == x: e",
"try: if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0:",
"kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass # ----------------------------------------------------------------- for p in",
"PPS Tree 검증 if self.__valid_pps_tree() is False: return False if self.verbose: print kavutil.vprint('Property",
"return 0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력) # ---------------------------------------------------------------------",
"변수 self.__deep = None self.__full_list = None self.init(buf) def init(self, buf): # OLE",
"size) + buf[t_off + 4:] if start is not None: t_off = off",
"for p in self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type']",
"= self.mm[:0x44] + data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm =",
"mm self.pps = pps self.bsize = bsize self.ssize = ssize self.bbd = bbd",
"ID # arc_name - 압축 파일 # fname_in_arc - 압축 해제할 파일 이름",
"알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def",
"t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함 t_link = get_block_link(r_no,",
"데이터가 출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c,",
"파일이 있는지를 조사한다. fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize",
"= (t_idx % ((bsize / 4) - 1)) next_b = xbbd_start_block for i",
": %d %s\") % (node, self.deep, name) # if self.pps[node]['Type'] != 5: #",
"= True self.fname = input_data self.fp = open(input_data, 'rb') buf = self.fp.read() else:",
"{'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep +=",
"큼 if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) == 0x00: name =",
"True: try: next_b = fat[next_b] if next_b == 0xfffffffe: break if len(ret) %",
"4] buf = get_bblock(self.mm, n, self.bsize) off = ((node % 4) * 0x80)",
"출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = [] for p in",
"write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read() d = d + d",
"= sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no *",
"self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block",
"= b_num while True: if old_num_bbd + b_num > 109: t_num = (old_num_bbd",
"존재하는 특정 스트림의 크기를 조정한다. (내장) # node : PPS 인덱스 # size",
"= kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print",
"len(data): # raise error('Not Support : BBD -> BBD (Dec)') # 개발 완료",
"import types import kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 #",
"t_no = prev_no elif prev_no == 0xffffffff and next_no != 0xffffffff: # Next만",
"# 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num",
"(self.bsize/4) # 한개의 BBD list 블록에 들어갈 수 있는 Big Block 개수 for",
"# bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd =",
"kavutil.vprint('Small Blocks') print self.small_block return True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다.",
"in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- #",
"= self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize) off = ((node %",
"* add_num) # 추가해야 할 BBD list 개수는 한개의 BBD에는 bsize / 4",
"링크가 존재하면 종료 break ret.append(next_b) except KeyError: break return ret # --------------------------------------------------------------------- #",
"add_num = num - n # 추가해야 할 블록 수 add_data = ('\\x00'",
"no in enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link +",
"' + '%2d %-23s %d %8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'],",
"init(self, buf): # OLE 주요 데이터 self.mm = buf self.bsize = 0 self.ssize",
"0x1000: # read_size = self.bsize fat = self.bbd else: # read_size = self.ssize",
"74) for p in self.pps: print ' ' + '%2d %-23s %d %8X",
"= (n + 1) * self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i",
"for i in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no =",
"# 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.ssize)",
"print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no",
"= {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size !=",
"최종 조합 self.mm += x_data + b_data + add_data + attach_data # 특수",
"in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i != (x_num-1):",
"!= 0x40: # 이상 파일 정보 처리 return False # bbd 읽기 self.bbd_list_array,",
"def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True)",
"= d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close()",
"if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] !=",
"len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link = [] for i in",
"--------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # BBD를 배열로 바꾸기",
"((self.bsize - 4) / 4)) + (1 if (t_num % ((self.bsize - 4)",
"p in self.__full_list: if p['Type'] == 2 and streams: ret.append(p['Name']) elif p['Type'] ==",
"if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return",
"1 == x: e = x loop = True continue else: while loop:",
"Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data = '' # b_data =",
"생성한다. sbd_link = [] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) #",
"-1 if no == -1: raise Error('PPS name is invalid.') return Stream(self, no)",
"buf[t_off + 4:] if pps_dir is not None: t_off = off + 0x4C",
"d[:0x11] == 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val &",
"# 사용하지 않는 SBD 링크를 찾는다. free_link = [i for i, no in",
"리스트를 알려준다. # 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self): # 진단",
"reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00' * size) # 모든 데이터를",
"1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return",
"충분히 존재함... return # 추가할 필요 없음 # 잔여 개수 체크하기 last_no =",
"= [] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for",
"0xfffffffd로 처리해야 함 x_data = '' # b_data = '' # add_data =",
"target_pps['Size'] t = ow.write(no, '\\x00' * size) # 모든 데이터를 0으로 Wipe t",
"print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30)",
"= t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1] sbd",
"arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진 ID # arc_name -",
"링크를 생성한다. sbd_link = [] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4))",
"# 한개의 BBD list 블록에 들어갈 수 있는 Big Block 개수 for no",
"[] # 특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data",
"scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장) # ---------------------------------------------------------------------",
"# 기존에는 SBD 사용 # raise error('Not Support : SBD -> BBD') #",
"= 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = '' for i",
"할 용량 add_big_num = (size / self.bsize) + (1 if (size % self.bsize)",
"return start, end def read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size =",
"self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size >= len(data): # raise error('Not",
"arc_name, file_infos) # 입력값 : arc_engine_id - 압축 가능 엔진 ID # arc_name",
"size) # 모든 데이터를 0으로 Wipe t = ow.delete(no) if t: self.init(t) #",
"# ----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent,",
"self.exploit = [] # 취약점 존재 여부 # 임시 변수 self.__deep = None",
"no = -1 if no == -1: raise Error('PPS name is invalid.') return",
"write_stream(self, name, data): for p in self.__full_list: if p['Name'] == name: no =",
"'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8]",
"self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if t: self.init(t) # 새롭게 OLE",
"def __write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link): off = (n +",
"노드의 경우 더이상 분석하지 않기 위해 처리 f = [] if len(self.pps) ==",
"# print '[-] filename :', rname, len(buf) # print '[-] rname :', o.write_stream(a_name,",
"추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb,",
"+ '%2d %-23s %d %8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'],",
"def close(self): pass # ----------------------------------------------------------------- for p in self.__full_list: if p['Name'] == name:",
"num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for",
"'' for no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp',",
"if t_size != 0: # 출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) &",
"self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크",
"+ struct.pack('<L', last_no) off = (next_b + 1) * self.bsize # t_data의 위치",
"# 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True)",
"핸들을 얻는다. # 입력값 : filename - 파일 이름 # 리턴값 : 압축",
"변수 선언 info['author'] = '<NAME>' # 제작자 info['version'] = '1.1' # 버전 info['title']",
"p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start']",
"= self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat = {} for i in",
"get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i in range(len(self.bbd) / 4): n",
"e == num_list.pop(0): break end = e break else: for i in range(len(num_list)):",
"추가될 수 있기 때문에... old_b_num = b_num while True: if old_num_bbd + b_num",
"이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd # 체크 !!! bbd_list_array,",
"n in enumerate(t_link): off = (n + 1) * self.bsize self.mm = self.mm[:off]",
"Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start",
"이름 # 리턴값 : 압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if",
"None: t_off = off + 0x74 buf = buf[:t_off] + struct.pack('<L', start) +",
"= OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read() d = d",
"self.small_block = small_block def __get_root_node(self, node): # 해당 정보를 가진 root를 찾기 for",
"import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True)",
"= [] och = [] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i",
"등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no ==",
"small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks')",
"pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) #",
"data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된 BBD",
"의미 없음 n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize)",
"* 4) else: t_idx = idx - 109 seg = (t_idx / ((bsize",
"전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf,",
"4:] if pps_prev is not None: t_off = off + 0x44 buf =",
"arc_engine_id - 압축 가능 엔진 ID # arc_name - 최종적으로 압축될 압축 파일",
"+ (idx * 4) else: t_idx = idx - 109 seg = (t_idx",
"# --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id ==",
"%-20s %4s %-8s %-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next',",
"link 추가 요청한다. (원본 이미지의 BBD link가 수정 됨) # old_link : 기존",
"Support : BBD -> BBD (Inc)') n = (len(data) / self.bsize) + (1",
"* 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list)",
"t_no = prev_no # 2. prev 노드 하위에 next가 없는 node를 찾아서 del_pps의",
"p['Size']) ''' print ' %-2s %-32s %4s %-4s %-4s %-4s %8s %8s' %",
"self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and",
"return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본",
"# MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x",
"in t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks,",
"self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # ---------------------------------------------------------------------",
"# OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = []",
"- ' if p['Next'] == 0xffffffff else '%4d ' % p['Next'] t +=",
"struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff'",
"< 0: return [] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD",
"파일 포맷 분석 정보 # 리턴값 : [[압축 엔진 ID, 압축된 파일 이름]]",
"= len(t_data) - (len(t_link) * self.ssize) t_num = (t_size / self.ssize) + (1",
"next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf,",
"노드 값을 root로 보낸다. t_no = 0xffffffff # root 노드를 수정한다. pps =",
"(total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4)) +",
"경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d",
"== 0: if next_b in ret: # 이미 링크가 존재하면 종료 break ret.append(next_b)",
"i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) #",
"return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid']",
"range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4)",
"개수만 파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize * num # 실제",
"next_no else: # prev_no == 0xffffffff and next_no == 0xffffffff: # 단일 노드",
"num_list: if e + 1 == x: e = x loop = True",
"for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:]",
"%22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체 경로 구하기",
"xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4]",
"arclist(self, filename, fileformat) # 압축 파일 내부의 파일 목록을 얻는다. # 입력값 :",
"for p in self.__full_list: if p['Name'] == name: no = p['Node'] break else:",
"= kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = ''",
"old_b_num = b_num while True: if old_num_bbd + b_num > 109: t_num =",
"특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm)",
"& 0x3f)) och.append(ch) ret_str = '' for ch in och: ret_str += struct.pack('<H',",
"= xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm,",
"struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock,",
"True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self):",
"= self.sbd for no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:]",
"= num * self.ssize # 추가해야 할 용량 add_big_num = (size / self.bsize)",
"= ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type']",
"파일 # fname_in_arc - 압축 해제할 파일 이름 # 리턴값 : 압축 해제된",
"= off + 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off +",
"e + 1 == x: e = x loop = True continue else:",
"4) else: t_idx = idx - 109 seg = (t_idx / ((bsize /",
"# 뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:] # 파일 뒤에 붙어",
"else: # 여유분이 부족함. 따라서 Root를 늘려야 함 size = num * self.ssize",
"# ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # ---------------------------------------------------------------------",
"- ' if p['Dir'] == 0xffffffff else '%4d ' % p['Dir'] t +=",
"0: pps_name = '' name = prefix + pps_name else: if self.pps[node]['Valid'] is",
"self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat)",
"self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data)",
"t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) #",
"이름 # filename_ex - 압축 파일 내부 파일 이름 # 리턴값 : {파일",
"= idx - 109 seg = (t_idx / ((bsize / 4) - 1))",
"새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def",
"(node, self.deep, name) # if self.pps[node]['Type'] != 5: # Stream만 저장 p =",
"MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x >",
"포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] =",
"if pps['Prev'] == node or pps['Next'] == node or pps['Dir'] == node: return",
"SBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종",
"처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' +",
"if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] !=",
"bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no",
"여유분이 부족함. 따라서 Root를 늘려야 함 size = num * self.ssize # 추가해야",
"' ' + ('-' * 74) for p in self.pps: print ' '",
"self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue except",
"PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는",
"OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if",
"0x2), 'viewtext': (val & 0x4 == 0x4)} except Error: pass o.close() return ret",
"# 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p in self.__full_list: if",
"여부 # 임시 변수 self.__deep = None self.__full_list = None self.init(buf) def init(self,",
"next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize) else:",
"0x180: buf = buf[:off] + '\\x00' * 0x80 elif del_info: buf = buf[:off]",
"존재 여부 # 임시 변수 self.__deep = None self.__full_list = None self.init(buf) def",
"이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 #",
"--------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드",
"div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data,",
"* self.bsize) t_num = (t_size / self.bsize) + (1 if (t_size % self.bsize)",
"i in range(len(self.root) / 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size =",
"off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm = (self.mm[:off] +",
"찾는다. free_link = [i for i, no in enumerate(sbd_link) if (no == 0xffffffff)]",
"# self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for",
"= self.mm[:off] + t_data[i * self.ssize:(i + 1) * self.ssize] + self.mm[off +",
"0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점",
"한개의 BBD list 블록에 들어갈 수 있는 Big Block 개수 for no in",
"self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO : 임시로 작성한거라 최적화 필요함",
"% xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks >",
"old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 BBD 링크 t_link =",
"필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD 링크를 처음 생성하므로 이전",
"--------------------------------------------------------------------- # OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret =",
"참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60]",
"== 1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for i",
"링크를 찾는다. free_link = [i for i, no in enumerate(sbd_link) if (no ==",
"+= get_bblock(self.mm, no, self.bsize) bbd_link = [] for i in range(len(bbd) / 4):",
"추가해야 할 블록 수 add_data = ('\\x00' * self.bsize * add_num) # 추가해야",
"BBD List에 BBD 등록하기 for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd",
"(n % div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print",
"get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block",
"= get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) #",
"kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200 != 0 or self.ssize !=",
"if __name__ == '__main__': # import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True)",
"data + sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff'",
"zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print",
"초기화 한다. # 인력값 : plugins_path - 플러그인 엔진의 위치 # verbose -",
"# 실제 마지막 Big Block 번호 n = (len(self.bbd) / 4 - 1)",
"self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를",
"'ff_ole' in fileformat: try: # OLE Stream 목록 추출하기 o = self.__get_handle(filename) for",
"{} for i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] =",
"조사한다. fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize",
"t += ' - ' if p['Start'] == 0xffffffff else '%8X ' %",
"sbd_fat self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self, node): # 해당 정보를",
"4) - 1)) else 0) off = (t_idx % ((bsize / 4) -",
"단일 노드 # 1. 0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff #",
"# --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- #",
"print kavutil.vprint('Small Blocks') print self.small_block return True # --------------------------------------------------------------------- # PPS Tree의 유효성을",
"no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data):",
"xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i in range(len(t) /",
"--------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no + 1) * self.bsize if",
"'rb') buf = self.fp.read() else: buf = input_data else: raise Error('Input data is",
"4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다. n = len(sbd) % self.bsize",
"기존에는 SBD 사용 # raise error('Not Support : SBD -> BBD') # 섹터가",
"유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한",
"next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1) * bsize +",
"special_no += bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks,",
"플러그인 엔진을 종료한다. # 리턴값 : 0 - 성공, 0 이외의 값 -",
"= xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data",
"= '' name = prefix + pps_name else: if self.pps[node]['Valid'] is False: #",
"= mm self.pps = pps self.bsize = bsize self.ssize = ssize self.bbd =",
"= buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next is not",
"오른쪽이 없으면 탐색 종료 break else: # 항상 오른쪽 노드가 큰 값임 no",
"= del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if",
"'%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD",
"- num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를 위한 헤더 수정 if",
"수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num)",
"t_off = off + 0x78 buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off",
"idx - 109 seg = (t_idx / ((bsize / 4) - 1)) +",
"for no in special_no: seg = no / bb_num off = no %",
"# 유효한 PPS에 대한 삭제인지 확인 if reset_stream: size = target_pps['Size'] t =",
"= kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] =",
"1) * self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off]",
"링크 수집하기 bbd = self.bbd for no in t_link: bbd = bbd[:no*4] +",
"t, p['Size']) # PPS 전체 경로 구하기 self.__deep = 0 self.__full_list = []",
"= (add_num / (self.bsize / 4)) + (1 if (add_num % (self.bsize /",
"전체 경로 구하기 self.__deep = 0 self.__full_list = [] try: self.__get_pps_path() except IndexError:",
"self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에",
"sbd = sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:]",
"들어갈 수 있는 Big Block 개수 for no in special_no: seg = no",
"많은 데이터가 출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm,",
"list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에",
"# print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat,",
"2 # 실제 마지막 Big Block 번호 n = (len(self.bbd) / 4 -",
"= ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat =",
"range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link = []",
"self.bbd else: # read_size = self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb,",
"len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.ssize) t_num",
"존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C',",
"(self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s",
"0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str",
"--------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for",
"1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for i in",
"%8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) ''' print",
"prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no",
"/ self.parent.ssize off = (self.parent.small_block[n / div_n] + 1) * self.parent.bsize off +=",
"+ i) # print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:])",
"self.sbd = None self.root = None self.pps = None self.small_block = None self.root_list_array",
"ole 파일 열기 self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename,",
"self.small_block return True # --------------------------------------------------------------------- # PPS Tree의 유효성을 체크한다. (내장) # ---------------------------------------------------------------------",
"80) return data[:size] def close(self): pass # ----------------------------------------------------------------- for p in self.__full_list: if",
"= buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir is not",
"('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print ' ' + ('-'",
"else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] !=",
"of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None,",
"t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- #",
"bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for",
"if next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize) next_b =",
"in t[1:]: t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd =",
"쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link):",
"vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일 포맷을",
"if (len(t_link) * self.ssize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data)",
"print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200 != 0 or self.ssize",
"링크를 구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수 추가하여 링크를 새롭게",
"0x4800: # 0x4800 - 0x483F # only one charecter can be decoded ch",
"prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS",
"None self.bbd_fat = {} self.sbd = None self.root = None self.pps = None",
"sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm = mm self.pps = pps",
"# 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28',",
"x_num b_num = (add_num / (self.bsize / 4)) + (1 if (add_num %",
"self.__full_list = [] try: self.__get_pps_path() except IndexError: pass # small block link 얻기",
"t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for i in range(num_of_xbbd_blocks-1):",
"= get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd for no in",
"(fsize / bsize) * bsize if fsize > rsize: fileformat = { #",
"' - ' if p['Start'] == 0xffffffff else '%8X ' % p['Start'] tname",
"# case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read()",
"개수 for no in special_no: seg = no / bb_num off = no",
"것은 Dec, Inc가 의미 없음 n = (len(data) / self.ssize) + (1 if",
"pps_name else: if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return 0 pps_name",
"크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize # 몇개의 블록이 필요한가?",
"# arc_name - 압축 파일 # fname_in_arc - 압축 해제할 파일 이름 #",
"next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if",
"= {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep",
"is not None: t_off = off + 0x74 buf = buf[:t_off] + struct.pack('<L',",
"# --------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된 BBD 이미지 # ---------------------------------------------------------------------",
"함 x_data = '' # b_data = '' # add_data = '' add_num",
"+ sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >= 0x1000: # 기존에는",
"/ 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize])",
"0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list : 기존 Small",
"SBD가 나누어 bsize 단위가 아니면 맞춘다. n = len(sbd) % self.bsize if n:",
"file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb')",
"# --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = [] for p in self.__full_list:",
"= get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전",
"get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list)",
"# 사전형 변수 선언 info['author'] = '<NAME>' # 제작자 info['version'] = '1.1' #",
"BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no +=",
"ret = {} mm = filehandle # OLE 헤더와 동일 if mm[:8] ==",
"# 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크",
"+ attach_data # 특수 블록에 BBD list도 추가 special_no += bbd_no # 특수",
"OLE Stream 목록 추출하기 o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name])",
"t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return (next_b + 1)",
"# 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link,",
"링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd",
"return self.mm def write(self, no, data): # 기존 PPS 정보를 얻는다 org_sb =",
"sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list",
"# --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # SBD를 배열로",
"in range(len(self.root) / 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps,",
"self.ssize = 0 # 임시 변수 self.__deep = 0 self.__full_list = [] self.parse()",
"buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if size is not None: t_off",
"self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Small block 영역에 ssize 만큼씩",
"no == -1: raise Error('PPS name(%s) is invalid.' % name) # self.init(self.mm) #",
"self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) ==",
"kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data",
"None self.ssize = None self.bbd_list_array = None self.bbd = None self.bbd_fat = {}",
"깨질 가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) == 0x00:",
"not None: t_off = off + 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next)",
"else '%4d ' % p['Prev'] t += ' - ' if p['Next'] ==",
"= len(sbd) % self.bsize if n: t = self.bsize - n sbd +=",
"+= struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE",
"1)) next_b = xbbd_start_block for i in range(seg): if next_b == 0xfffffffe: return",
"t_off = off + 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off",
"else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (내장)",
"zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:]",
"& 0xF0 == 0x00 and ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2]",
"in enumerate(bbd_link) if (no == 0xffffffff and i < size / self.bsize)] if",
"can be decoded ch = MsiBase64Encode(ch - 0x4800) if not ch: continue else:",
"- 0x4800) if not ch: continue else: # 0x3800 - 0x383F # the",
"파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat: try: # OLE",
"and storages: ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? #",
"!= 0xffffffff: # 양쪽 모두 노트가 존재함 # 1. prev 노드 값을 root로",
"'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 #",
"((self.bsize - 4) / 4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks #",
"data = struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4] + data +",
"else: if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return 0 pps_name =",
"% self.ssize) else 0) t_data = data + ('\\x00' * ((n * self.ssize)",
"ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm",
"self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용",
"else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:] #",
"+= '\\x00' * self.bsize * num # 실제 필요한 데이터 블록 self.mm +=",
"total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num += x_num b_num =",
"수 return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다.",
"node == 0: pps_name = '' name = prefix + pps_name else: if",
"buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir is not None:",
"(off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4])",
"((n * self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num =",
"xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD",
"BBD 링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else: #",
"del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir']",
"== 0: # 정상적인 PPS가 없음 return False while len(f): x = f.pop(0)",
"있는가? if 'ff_ole' in fileformat: try: # OLE Stream 목록 추출하기 o =",
"self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기 (스트림만 출력)",
"/ self.bsize) - 2 # 실제 마지막 Big Block 번호 n = (len(self.bbd)",
"o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # ---------------------------------------------------------------------",
"= o.openstream(fname_in_arc) try: data = fp.read() except: data = None return data #",
"개수로 링크 줄이기 # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) #",
"사용 if org_size >= len(data): # raise error('Not Support : SBD -> SBD",
"= kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] =",
"주요 정보 info = dict() # 사전형 변수 선언 info['author'] = '<NAME>' #",
"== node or pps['Next'] == node or pps['Dir'] == node: return i def",
"- 최종적으로 압축될 압축 파일 이름 # file_infos - 압축 대상 파일 정보",
"보낸다. t_no = prev_no # 2. prev 노드 하위에 next가 없는 node를 찾아서",
"return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. #",
"exists(self, name): for p in self.__full_list: if p['Name'] == name: return True else:",
"= bsize self.ssize = ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd =",
"TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start = None end",
"def listdir(self, streams=True, storages=False): ret = [] for p in self.__full_list: if p['Type']",
"# --------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf =",
"num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no",
"no = kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i * self.bsize:(i + 1)",
"/ 4) - 1)) else 0) off = (t_idx % ((bsize / 4)",
"fname_in_arc): data = None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp =",
"+ free_link[:add_num] # 최종 결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num] #",
"PPS 읽기 self.pps = [] for i in range(len(self.root) / 0x80): p =",
"하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) >",
"self.mm[:0x44] + data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48]",
"get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for i in range(num_of_bbd_blocks): no =",
"get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.bsize)",
"kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link =",
"+ (1 if (t_idx % ((bsize / 4) - 1)) else 0) off",
"XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off = (next_b +",
"plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle = {} self.verbose = verbose return",
"# org_link_list : 기존 Small block 링크 # num_link : 필요로 하는 전체",
"# 필요한 개수로 링크 줄이기 # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data,",
"fsize > rsize: fileformat = { # 포맷 정보를 담을 공간 'Attached_Pos': rsize,",
"== 0: # 분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff",
"_ = get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array,",
"size / self.bsize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return #",
"< size / self.bsize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return",
"= (t_size / self.ssize) + (1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num)",
"self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list :",
"kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = '' for",
"== -1: raise Error('PPS name(%s) is invalid.' % name) # self.init(self.mm) # return",
"((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb,",
"invalid.' % name) # self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize,",
"self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- #",
"/ self.bsize) + (1 if (len(data) % self.bsize) else 0) t_data = data",
"range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] bbd =",
"1 # 최종 조합 self.mm += x_data + b_data + add_data + attach_data",
"= [0] # 이미 분석한 노드의 경우 더이상 분석하지 않기 위해 처리 f",
"try: if os.path.exists(rname): with open(rname, 'rb') as fp: buf = fp.read() # print",
"& 0x2 == 0x2), 'viewtext': (val & 0x4 == 0x4)} except Error: pass",
"if node == 0: pps_name = '' name = prefix + pps_name else:",
"+ b_num # 전체 BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num)",
"# org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해 준비한다 if",
"self.deep, name) # if self.pps[node]['Type'] != 5: # Stream만 저장 p = {'Node':",
"in enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num]",
"no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i",
"삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close() # zfile.close() return",
"[] fat = bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe: ret.append(next_b) while",
"in self.pps: print ' ' + '%2d %-23s %d %8X %8X %8X %8X",
"i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe #",
"- rsize } ret['ff_attach'] = fileformat # HWP 인가? o = OleFile(filename) try:",
"수 있기 때문에... old_b_num = b_num while True: if old_num_bbd + b_num >",
"list도 추가 special_no += bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block)",
"기존 BBD link # add_num : 추가 BBD link 개수 # --------------------------------------------------------------------- def",
"0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for",
"하나? t_size = len(t_data) - (len(t_link) * self.bsize) t_num = (t_size / self.bsize)",
"num - n # 추가해야 할 블록 수 add_data = ('\\x00' * self.bsize",
"data): off = (no + 1) * self.bsize if len(data) == self.bsize: self.mm",
"# 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트 # ---------------------------------------------------------------------",
"t_off = off + 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off",
"p['Type'] == 1 and storages: ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- #",
"% div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name'])",
"= get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운",
"임시 변수 self.__deep = 0 self.__full_list = [] self.parse() # OLE 파일을 분석",
"'%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps = []",
"div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n] + 1) *",
"self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev']",
"# --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose #",
"self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용",
"인력값 : plugins_path - 플러그인 엔진의 위치 # verbose - 디버그 모드 (True",
"0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' + pps_name #",
"= self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: # 출력시",
"# old_link : 기존 SBD link # add_num : 추가 SBD link 개수",
"and i < size / self.bsize)] if len(free_link) >= num: # 여유분이 충분히",
": filename - 파일 이름 # fileformat - 파일 포맷 분석 정보 #",
"= self.bsize fat = self.bbd else: # read_size = self.ssize fat = self.sbd",
"블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat = {}",
"len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self) #",
"ch in och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str",
"print hex(t) # BBD List에 BBD 등록하기 for i, no in enumerate(bbd_no): off",
"t += ' - ' if p['Prev'] == 0xffffffff else '%4d ' %",
"변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.bsize) + (1",
"특수 블록에 BBD list도 추가 special_no += bbd_no # 특수 블록 처리 (bbd_list_array,",
"0xF0 == 0x00 and ord(pps[1]) == 0x00: name = '_\\x00' + pps[2:t_size-2] else:",
"* self.bsize # 파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은",
"충분히 존재함... return # 추가할 필요 없음 else: # 여유분이 부족함. 따라서 Root를",
"' % p['Dir'] t += ' - ' if p['Start'] == 0xffffffff else",
"Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no +",
"i) # print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) #",
"+ (off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print",
"= pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var ->",
"# 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요",
"'' t += ' - ' if p['Prev'] == 0xffffffff else '%4d '",
"+ self.bsize:] if __name__ == '__main__': # import zlib # o = OleFile('normal.hwp',",
"n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list",
"zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일 내부의",
"if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) == 0x00: name = '_\\x00'",
"'' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data,",
"정상적인 PPS가 없음 return False while len(f): x = f.pop(0) try: if self.pps[x]['Type']",
"링크 줄이기 # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS",
"pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: #",
"SBD를 배열로 바꾸기 t_link = [] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd,",
"+= struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함 else: x_data +=",
"= [i for i, no in enumerate(sbd_link) if (no == 0xffffffff and i",
"적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in",
"len(data): # raise error('Not Support : SBD -> SBD (Dec)') # 지원 완료",
"+ self.mm[off + self.bsize:] # XBBD 생성하기 for i in range(x_num): x_data +=",
"ret_link = old_link + free_link[:add_num] # 최종 결과의 SBD 링크 t_link = old_link[-1:]",
"= self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를",
"IOError: pass return False # --------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile:",
"for i in t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기",
"kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i * self.bsize:(i + 1) * self.bsize]",
"== 1 and storages: ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- # 스트림이",
"num * self.ssize # 추가해야 할 용량 add_big_num = (size / self.bsize) +",
"self.pps = pps self.bsize = bsize self.ssize = ssize self.bbd = bbd self.bbd_fat",
"next_b == 0xfffffffe: break if len(ret) % 10000 == 0: if next_b in",
"self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small",
"Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를",
"streams: ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name']) else: pass return ret",
"get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석 처리 if self.verbose: print",
"Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미",
"추가하기 self.mm += '\\x00' * self.bsize * num # 실제 필요한 데이터 블록",
"# --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle = {}",
"break else: old_b_num = b_num total_bbd_num = old_num_bbd + b_num # 전체 BBD",
"블록에 BBD list도 추가 special_no += bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks,",
"유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix +",
"가짐 # 미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in",
"def is_olefile(filename): try: buf = open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True",
"블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize * num #",
"4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize",
"n sbd += '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된",
"get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list 블록에 들어갈 수 있는 Big",
"ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name']) else: pass return ret #",
"# getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값",
"list 개수는 한개의 BBD에는 bsize / 4 개수만큼 Big Block을 담을 수 있음",
"old_num_bbd + b_num > 109: t_num = (old_num_bbd + b_num - 109) total_xbbd_num",
"self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block if",
"+ self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd : 수정된 BBD 이미지",
"== 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347')",
"바꾸기 t_link = [] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i *",
"if size >= 0x1000: t_list = list(list_array) while len(t_list): s, e = self.get_liner_value(t_list)",
"* bsize + (off * 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. #",
"XBBD 개수 add_num += x_num b_num = (add_num / (self.bsize / 4)) +",
"= '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm,",
"(len(self.bbd) / 4 - 1) - last_no if n >= num: # 잔여",
"def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle = {} self.verbose =",
"= True continue else: while loop: if e == num_list.pop(0): break end =",
"# --------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을 얻는다. # 입력값 :",
"있는 잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ =",
"+ 4:] if pps_next is not None: t_off = off + 0x48 buf",
"write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의 수가 너무 많음 o =",
"pps['Start'] size = pps['Size'] if size >= 0x1000: self.read_size = self.parent.bsize self.fat =",
"if p['Start'] == 0xffffffff else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace')",
"# 필요한 개수로 링크 줄이기 # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data,",
"개수만큼 Big Block을 담을 수 있음 b_num = (add_num / (self.bsize/4)) + (1",
"for i, no in enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link =",
"= buf[:t_off] + struct.pack('<L', size) + buf[t_off + 4:] if start is not",
"link 추가 요청한다. (원본 이미지의 SBD link가 수정 됨) # old_link : 기존",
"Support : BBD -> SBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음",
"in enumerate(sbd_link) if (no == 0xffffffff and i < r_size / self.ssize)] if",
"메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류",
"filename, fileformat): file_scan_list = [] # 검사 대상 정보를 모두 가짐 # 미리",
"else: # read_size = self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb, fat)",
"i, n in enumerate(t_link): off = (self.small_block[n / 8] + 1) * self.bsize",
"self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff,",
"off + 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:]",
"mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id - 압축 가능 엔진 ID",
"--------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle = {} self.verbose",
"[i for i, no in enumerate(sbd_link) if (no == 0xffffffff and i <",
"# num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root",
"__add_small_block_num(self, num): root = self.pps[0] r_size = root['Size'] r_no = root['Start'] # SBD",
"print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks',",
"o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close() ''' # XBBD 늘어나는",
"if d[:0x11] == 'HWP Document File': val = ord(d[0x24]) ret['ff_hwp'] = {'compress': (val",
": 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if",
"else: t_data = '' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize)",
"0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break else: # 항상 오른쪽 노드가",
"for i in range(len(num_list)): num_list.pop(0) end = e return start, end def read(self):",
"% (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체 경로 구하기 self.__deep =",
"self.bsize if len(data) == self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return",
"# 양쪽 모두 노트가 존재함 # 1. prev 노드 값을 root로 보낸다. t_no",
"_ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list 블록에 들어갈 수",
"상당히 많은 데이터가 출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks < 109:",
"enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise",
"# BBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] #",
"/ 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch: if 0x3800 <=",
"값을 root로 보낸다. t_no = prev_no elif prev_no == 0xffffffff and next_no !=",
"사용하지 않는 BBD 링크를 찾는다. free_link = [i for i, no in enumerate(bbd_link)",
"'[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name)",
"off = (s + 1) * self.read_size data += self.parent.mm[off:off + self.read_size *",
"= (next_b + 1) * self.bsize # t_data의 위치 self.mm = self.mm[:off] +",
"self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize) off = ((node % 4)",
"= {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i]",
"리턴한다. # TODO : 임시로 작성한거라 최적화 필요함 def get_liner_value(self, num_list): start =",
"sbd = self.sbd for no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' +",
"= bbd[:no*4] + data + bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no *",
"/ ((bsize / 4) - 1)) + (1 if (t_idx % ((bsize /",
"data + self.mm[off + self.bsize:] if __name__ == '__main__': # import zlib #",
"if size >= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size =",
"kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps,",
"2 and len(self.pps[x]['Name']) == 0: continue except IndexError: if (x & 0x90900000) ==",
"SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array =",
"vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) #",
"self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data",
"idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if idx <= 109:",
"characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6) &",
"XBBD 마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off",
"'\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) #",
"next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks,",
"get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) #",
"_ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for i in range(num_of_bbd_blocks):",
"filename_ex) # 파일 포맷을 분석한다. # 입력값 : filehandle - 파일 핸들 #",
"0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block",
"+ 1) * 4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다. n =",
"# --------------------------------------------------------------------- # OLE 내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret",
"+ bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if org_size >= len(data): #",
"%8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'],",
"struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data + self.mm[0x4C:] # XBBD 블록 연결",
"- 4) / 4)) + (1 if (t_num % ((self.bsize - 4) /",
"--------------------------------------------------------------------- def write_stream(self, name, data): for p in self.__full_list: if p['Name'] == name:",
"END of XBBD # BBD 추가하기 bbd_no = [] b_data = '\\xff' *",
"file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as fp: buf = fp.read() #",
"이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return",
"= t_data[:-4] + struct.pack('<L', last_no) off = (next_b + 1) * self.bsize #",
"self.bbd_fat) self.sbd = '' for no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize)",
"block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for",
"= sbd_fat self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self, node): # 해당",
"if reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00' * size) # 모든",
"# raise error('Not Support : BBD -> BBD (Dec)') # 개발 완료 n",
"# BBD에 링크 연결하기 for i in range(len(t_link)-1): no = t_link[i+1] data =",
"self.pps = None self.small_block = None self.root_list_array = None self.exploit = [] #",
"가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def listvirus(self):",
"OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile =",
"# SBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 # num_link",
"no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' %",
"부분은 제거 attach_data = self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터 #",
"마지막 Big Block 번호 n = (len(self.bbd) / 4 - 1) - last_no",
"= get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for i in range(num_of_bbd_blocks): no",
"data[:size] def close(self): pass # ----------------------------------------------------------------- for p in self.__full_list: if p['Name'] ==",
"바꾸기 self.sbd = '' for i in t_link: self.sbd += struct.pack('<L', i) #",
"1) * self.bsize off += (n % 8) * self.ssize self.mm = self.mm[:off]",
"모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # SBD 배열을",
"= min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: # 출력시 이름이 깨질 가능성이",
"= None self.ssize = None self.bbd_list_array = None self.bbd = None self.bbd_fat =",
"num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks",
"링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else: # 이전",
"vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # ---------------------------------------------------------------------",
"= None self.init(buf) def init(self, buf): # OLE 주요 데이터 self.mm = buf",
"기존에는 BBD 사용 # raise error('Not Support : BBD -> SBD') # 섹터가",
"+ '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize)",
"= (len(self.mm) / self.bsize) * self.bsize # 파일 크기 self.mm = self.mm[:size] #",
"attach_data # 특수 블록에 BBD list도 추가 special_no += bbd_no # 특수 블록",
"= parent self.node = node self.read_size = 0 self.fat = None # print",
"등록 last_no += 1 # END of XBBD # BBD 추가하기 bbd_no =",
"else: pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self,",
"else: no = -1 if no == -1: raise Error('PPS name(%s) is invalid.'",
"SBD link 개수 # --------------------------------------------------------------------- def __modify_small_block_link(self, old_link, add_num): if add_num < 0:",
"= org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 #",
"[] b_data = '\\xff' * self.bsize * b_num for i in range(b_num): bbd_no.append(last_no)",
">= num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일",
"t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기",
"def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return ord(ct[x])",
"= [] # 검사 대상 정보를 모두 가짐 # 미리 분석된 파일 포맷중에",
"try: data = fp.read() except: data = None return data # --------------------------------------------------------------------- #",
"else: buf = input_data else: raise Error('Input data is invalid.') # 수정 모드",
"- 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인",
"sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off] + data",
"= [] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는",
"# --------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값 :",
"else 0) t_data = data + ('\\x00' * ((n * self.bsize) - len(data)))",
"데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self, t_data, t_link): for i, n in",
"- len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize",
"= get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd for no in",
"if len(ret) % 10000 == 0: if next_b in ret: # 이미 링크가",
": BBD -> BBD (Inc)') n = (len(data) / self.bsize) + (1 if",
"# SBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 # raise",
"'\\x00' * 0x80 elif del_info: buf = buf[:off] + '\\x00' * 0x80 +",
"결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else:",
"경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret = []",
"# big block, small bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e)",
"bsize): off = (no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD",
"get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크",
"수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다.",
"start: start = num_list.pop(0) e = start loop = False for x in",
"('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link =",
"wch: if 0x3800 <= ch <= 0x4840: if ch >= 0x4800: # 0x4800",
"4) / 4)) + (1 if (t_num % ((self.bsize - 4) / 4))",
"== 0xffffffff else '%4d ' % p['Dir'] t += ' - ' if",
"+ self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정",
"4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] =",
"플러그인 엔진을 초기화 한다. # 인력값 : plugins_path - 플러그인 엔진의 위치 #",
"이전 링크 수집하기 sbd = self.sbd for no in t_link: sbd = sbd[:no*4]",
"prev 노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no)",
"None self.pps = None self.small_block = None self.root_list_array = None self.exploit = []",
"# --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일 내부의 파일 목록을 얻는다.",
"in list_array: div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n] +",
"data += self.parent.mm[off:off + self.read_size * (e - s + 1)] else: for",
"write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() ''' o",
"여유분이 충분히 존재함... return # 추가할 필요 없음 # 잔여 개수 체크하기 last_no",
"self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if",
"# BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = '' for i in t_link:",
"of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks > 109: # bbd list",
"= MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str = '' for ch in",
"- (len(t_link) * self.ssize) t_num = (t_size / self.ssize) + (1 if (t_size",
"# --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps,",
"= {} self.sbd = None self.root = None self.pps = None self.small_block =",
"압축 엔진 ID # arc_name - 압축 파일 # fname_in_arc - 압축 해제할",
"a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as fp: buf =",
"% p['Dir'] t += ' - ' if p['Start'] == 0xffffffff else '%8X",
"self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80)",
"ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def",
"- len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd)",
"Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link = get_block_link(r_no, self.bbd)",
"% 0x200 != 0 or self.ssize != 0x40: # 이상 파일 정보 처리",
"b_data = '\\xff' * self.bsize * b_num for i in range(b_num): bbd_no.append(last_no) last_no",
"-= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'],",
"이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) ==",
"ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된",
"get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS",
"Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size =",
"('-' * 74) for p in self.pps: if p['Valid'] is False: # 유효한",
"raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. # num",
"# 남은 링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff",
"Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함",
"추가해야 하나? t_size = len(t_data) - (len(t_link) * self.bsize) t_num = (t_size /",
": 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None,",
"등록하기 for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) #",
"num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS 읽기 self.pps = [] for i",
"= b_num total_bbd_num = old_num_bbd + b_num # 전체 BBD list 개수 self.mm",
"self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat,",
": SBD -> BBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n",
"74) for p in self.pps: if p['Valid'] is False: # 유효한 Tree가 아니면",
"추가할 필요 없음 # 잔여 개수 체크하기 last_no = (size / self.bsize) -",
"no in enumerate(sbd_link) if (no == 0xffffffff and i < r_size / self.ssize)]",
"return False while len(f): x = f.pop(0) try: if self.pps[x]['Type'] != 1 and",
"return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block 링크를 따라 데이터 쓰기",
"4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i for",
"* self.bsize * b_num for i in range(b_num): bbd_no.append(last_no) last_no += 1 #",
"PPS에 대한 삭제인지 확인 if reset_stream: size = target_pps['Size'] t = ow.write(no, '\\x00'",
"no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for",
"--------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b = no",
"DecodeStreamName(name): wch = [] och = [] for i in range(len(name) / 2):",
"# CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False",
": 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD",
"[] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD",
"링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의 링크를 구함 self.__modify_big_block_link(t_link,",
"bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list",
"fp: buf = fp.read() # print '[-] filename :', rname, len(buf) # print",
"== 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False # --------------------------------------------------------------------- # OleFile",
"in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else:",
"self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' + pps_name # print (\"%02d :",
"i < size / self.bsize)] if len(free_link) >= num: # 여유분이 충분히 존재함...",
"free_link[:add_num] # 최종 결과의 BBD 링크 t_link = free_link[:add_num] # BBD에 링크 연결하기",
"self.mm = self.mm[:off] + data + self.mm[off + self.bsize:] if __name__ == '__main__':",
"self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i in range(len(self.bbd) /",
"연결된 링크 # --------------------------------------------------------------------- # SBD를 수정한다. # sbd : 수정된 SBD 이미지",
"--------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # SBD를 배열로 바꾸기",
"개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize) * self.bsize",
"input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile = False # 파일로",
"p['Node'] break else: no = -1 if no == -1: raise Error('PPS name",
"109: t_num = (total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4)",
"데이터 self.mm = None self.bsize = None self.ssize = None self.bbd_list_array = None",
"4): no = kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i * self.bsize:(i +",
"# start : 시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None,",
"클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose",
"raise Error('PPS name is invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를",
"return False # --------------------------------------------------------------------- # 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): #",
"buf = input_data else: raise Error('Input data is invalid.') # 수정 모드 self.write_mode",
"< 0: return [] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _",
"if len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link = [] for i",
"Error('PPS name(%s) is invalid.' % name) # self.init(self.mm) # return ow = OleWriteStream(self.mm,",
"--------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat,",
"self.parent.bbd_fat else: self.read_size = self.parent.ssize self.fat = self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data",
"def write(self, no, data): # 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size",
"delete_storage: # 유효한 스토리지? t = ow.delete(no) # 링크 삭제 if t: self.init(t)",
"* bsize if fsize > rsize: fileformat = { # 포맷 정보를 담을",
"섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.bsize) +",
"i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4)) # 사용하지 않는 BBD 링크를 찾는다.",
"no in enumerate(bbd_link) if (no == 0xffffffff and i < size / self.bsize)]",
"포맷 분석 정보} or None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret",
"fp.read() # print '[-] filename :', rname, len(buf) # print '[-] rname :',",
"(idx * 4) else: t_idx = idx - 109 seg = (t_idx /",
"no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd)",
"+ data + bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no * 4] +",
"fp = o.openstream(fname_in_arc) try: data = fp.read() except: data = None return data",
"분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] ==",
"raise error('Not Support : SBD -> BBD') # 섹터가 변화는 것은 Dec, Inc가",
"or None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret = {} mm",
"bbd list 개수가 109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block for i",
"/ 4): n = kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block =",
"않는 SBD 링크를 찾는다. free_link = [i for i, no in enumerate(sbd_link) if",
"* self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i + 1) * self.bsize]",
"1 and delete_storage: # 유효한 스토리지? t = ow.delete(no) # 링크 삭제 if",
"1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no)",
"start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd) #",
"SBD link 추가 요청한다. (원본 이미지의 SBD link가 수정 됨) # old_link :",
"/ self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 #",
"o = OleFile(arc_name, write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for",
"= del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no =",
"--------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize) * self.bsize # 파일",
"수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd for",
"pass # small block link 얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print",
"self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) ''' if len(self.bbd_list_array)/4",
"섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.ssize) +",
"많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read() d",
"리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf,",
"# Big Block 추가 요청 # t_link = get_block_link(r_no, self.bbd) # 이전 Small",
"# Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff,",
"해당 블록은 0xfffffffd로 처리해야 함 x_data = '' # b_data = '' #",
"= 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node:",
"배열로 바꾸기 t_link = [] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i",
"self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크",
"' ' + ('-' * 74) for p in self.pps: if p['Valid'] is",
"p['Next'] t += ' - ' if p['Dir'] == 0xffffffff else '%4d '",
"t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else: # 이전 링크가",
"/ self.ssize) + (1 if (len(data) % self.ssize) else 0) t_data = data",
"buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir is",
"1. 0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff # root 노드를 수정한다.",
"0) t_data = data + ('\\x00' * ((n * self.ssize) - len(data))) #",
"# old_link : 기존 BBD link # add_num : 추가 BBD link 개수",
"재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm,",
"# t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat)",
"len(f) == 0: # 정상적인 PPS가 없음 return False while len(f): x =",
"'%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd,",
"node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf",
"크기를 조정한다. (내장) # node : PPS 인덱스 # size : 설정 크기",
"while True: try: next_b = fat[next_b] if next_b == 0xfffffffe: break if len(ret)",
"get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd for no in t_link:",
"디버그 모드 (True or False) # 리턴값 : 0 - 성공, 0 이외의",
"seg = no / bb_num off = no % bb_num # print hex(no),",
"open(rname, 'rb') as fp: buf = fp.read() # print '[-] filename :', rname,",
"+ buf[off+0x80:] if size is not None: t_off = off + 0x78 buf",
"list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c)",
"True return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다.",
"filename_ex): ret = {} mm = filehandle # OLE 헤더와 동일 if mm[:8]",
"BBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 if org_size >=",
"= num_list.pop(0) e = start loop = False for x in num_list: if",
"self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type']",
"+ t_data + self.mm[off + self.bsize:] # XBBD 생성하기 for i in range(x_num):",
"False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] =",
"= (len(data) / self.ssize) + (1 if (len(data) % self.ssize) else 0) t_data",
"size = target_pps['Size'] t = ow.write(no, '\\x00' * size) # 모든 데이터를 0으로",
"enumerate(t_link): off = (n + 1) * self.bsize self.mm = self.mm[:off] + t_data[i",
"이름 # 리턴값 : {파일 포맷 분석 정보} or None # --------------------------------------------------------------------- def",
"sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array =",
"kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) #",
"# 리턴값 : {파일 포맷 분석 정보} or None # --------------------------------------------------------------------- def format(self,",
"' - ' if p['Dir'] == 0xffffffff else '%4d ' % p['Dir'] t",
"o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc) try: data = fp.read() except: data =",
"else 0) t_data = data + ('\\x00' * ((n*self.ssize) - len(data))) # 여분의",
"num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root =",
"# arcclose(self) # 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname",
"'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm,",
"self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80)",
"수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block 링크를",
"추가하기 # 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link =",
"+ '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] # SBD가 나누어 bsize 단위가",
"next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no",
"0 # 임시 변수 self.__deep = 0 self.__full_list = [] self.parse() # OLE",
"--------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일 내부의 파일 목록을 얻는다. #",
"+ struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print buf",
"pps['Next'] return no def delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no",
"print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() ''' o = OleFile('normal.hwp',",
"/ 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]]",
"(t_num / ((self.bsize - 4) / 4)) + (1 if (t_num % ((self.bsize",
"verbose): self.verbose = verbose self.mm = mm self.pps = pps self.bsize = bsize",
"개발 완료 n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize)",
"get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True # ---------------------------------------------------------------------",
"file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as fp: buf",
"pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm,",
"self.read_size data += self.parent.mm[off:off + self.read_size * (e - s + 1)] else:",
"root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks",
"try: next_b = fat[next_b] if next_b == 0xfffffffe: break if len(ret) % 10000",
"없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD 링크 t_link = free_link[:add_num] #",
"old_num_bbd + b_num # 전체 BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L',",
"인덱스 # size : 설정 크기 # start : 시작 링크 # ---------------------------------------------------------------------",
"del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no,",
"= 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = '' for i",
"# 이상 파일 정보 처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks,",
"+= attach_data else: special_no = [] # 특수 목적의 Big Block 번호. 해당",
"t_data = '' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b",
"Inc가 의미 없음 n = (len(data) / self.bsize) + (1 if (len(data) %",
"error('Not Support : SBD -> SBD (Dec)') # 지원 완료 n = (len(data)",
"= next_no else: # prev_no == 0xffffffff and next_no == 0xffffffff: # 단일",
"num_link: # SBD를 배열로 바꾸기 t_link = [] for i in range(len(self.sbd) /",
"--------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num < 0: return [] # 전체",
"== 0xffffffff else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print '",
"----------------------------------------------------------------- for p in self.__full_list: if p['Name'] == name: no = p['Node'] break",
"배열을 BBD 버퍼로 바꾸기 self.bbd = '' for i in t_link: self.bbd +=",
"# 여유분이 충분히 존재함... return # 추가할 필요 없음 else: # 여유분이 부족함.",
"{} self.verbose = verbose return 0 # 플러그인 엔진 초기화 성공 # ---------------------------------------------------------------------",
"pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' + pps_name # print",
"추가 special_no += bbd_no # 특수 블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array,",
"p['Next'], p['Dir'], p['Start'], p['Size']) ''' print ' %-2s %-32s %4s %-4s %-4s %-4s",
"i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block",
"링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # SBD",
"getinfo(self): # 플러그인 엔진의 주요 정보 info = dict() # 사전형 변수 선언",
"# 진단 가능한 악성코드 리스트 vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158')",
"<< kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize)",
"/ 4): n = kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd)",
"0: # 분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff and",
"ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- #",
"BBD를 배열로 바꾸기 t_link = [] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd,",
"= self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no == 0xffffffff: #",
"OLE 주요 데이터 self.mm = None self.bsize = None self.ssize = None self.bbd_list_array",
"플러그인 엔진의 위치 # verbose - 디버그 모드 (True or False) # 리턴값",
"len(ret) % 10000 == 0: if next_b in ret: # 이미 링크가 존재하면",
"x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를 위한",
"t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd = '' for i in",
"+ self.read_size * (e - s + 1)] else: for n in list_array:",
"# 추가해야 할 XBBD 개수 add_num += x_num b_num = (add_num / (self.bsize",
"0xffffffff: # Next만 존재 # 1. next 노드 값을 root로 보낸다. t_no =",
"bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b",
"진단 가능한 악성코드 리스트 vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') #",
"위해 준비한다 if len(data) >= 0x1000: # BBD를 사용한다. if org_size >= 0x1000:",
"0 or self.ssize != 0x40: # 이상 파일 정보 처리 return False #",
"range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link]",
"이전 Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small Block의",
"return None return ord(ct[x]) def DecodeStreamName(name): wch = [] och = [] for",
"def delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name'] == name:",
"= sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self,",
"open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in range(len(bbd) / 4): bbd_link.append(kavutil.get_uint32(bbd, i*4))",
"+ 1) * self.parent.bsize off += (n % div_n) * self.parent.ssize data +=",
"* ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb,",
"0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4) return",
"# XBBD 생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) -",
"sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd",
"크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : SBD -> SBD",
"== name: return True else: return False # --------------------------------------------------------------------- # 스트림을 연다 #",
"취약점 존재 여부 # 임시 변수 self.__deep = None self.__full_list = None self.init(buf)",
"버퍼로 바꾸기 self.bbd = '' for i in t_link: self.bbd += struct.pack('<L', i)",
"# BBD를 수정한다. # bbd : 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self,",
"range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] sbd =",
"# 연결된 링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의 SBD",
"t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) #",
"self.verbose: print if num_of_bbd_blocks < 109: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm,",
"- (len(t_link) * self.bsize) t_num = (t_size / self.bsize) + (1 if (t_size",
"i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다.",
"['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False",
"kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1",
"is False: return False if self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s",
"필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list)",
"get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = '' for no in root_list_array: self.root",
"DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44)",
"이미지의 BBD link가 수정 됨) # old_link : 기존 BBD link # add_num",
"data + bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff'",
"ret_str = '' for ch in och: ret_str += struct.pack('<H', ch) # print",
"= ow.delete(no) # 링크 삭제 if t: self.init(t) # 새롭게 OLE 재로딩 #",
"buf = self.fp.read() else: buf = input_data else: raise Error('Input data is invalid.')",
"해제된 내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data =",
"i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link",
"--------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole",
"0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78)",
"BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num < 0:",
"' + ('-' * 74) for p in self.pps: if p['Valid'] is False:",
"Support : SBD -> BBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음",
"i, no in enumerate(sbd_link) if (no == 0xffffffff and i < r_size /",
"# 새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block",
"org_link_list : 기존 Small block 링크 # num_link : 필요로 하는 전체 링크",
"+= 1 # END of XBBD # BBD 추가하기 bbd_no = [] b_data",
"4) data = bbd[i * self.bsize:(i + 1) * self.bsize] off = (no",
"-> BBD (Inc)') n = (len(data) / self.bsize) + (1 if (len(data) %",
"(self.small_block[n / 8] + 1) * self.bsize off += (n % 8) *",
"https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids:",
"ch <= 0x4840: if ch >= 0x4800: # 0x4800 - 0x483F # only",
"블록 수 추가하기 # SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link =",
"is False: # 유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name",
"/ 4): no = kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i * self.bsize:(i",
"def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node /",
"(self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼",
"for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n",
"start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. #",
": 압축 해제된 내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc):",
"Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize) *",
"self.bsize * num # 실제 필요한 데이터 블록 self.mm += attach_data else: special_no",
"fileformat = { # 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize -",
"링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no += 1 # END",
"bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면",
"i * 4) data = bbd[i * self.bsize:(i + 1) * self.bsize] off",
"node): # 특정 노드의 Max 값을 가진 node를 찾기 no = node while",
"크면 xbbd를 가져와야 함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data =",
"data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else:",
"<< kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1",
"Ole signature') # big block, small bloc 크기 구하기 self.bsize = 1 <<",
"+ free_link[:add_num] # 최종 결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num] #",
"get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i, no in",
"# --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1) * bsize return buf[off:off+bsize]",
"sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for i,",
"((self.bsize/4) - 1) if i != (x_num-1): x_data += struct.pack('<L', last_no+1) # 다음",
"or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole':",
"연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD",
"# OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose",
"ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str = '' for ch",
"init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle = {} self.verbose = verbose",
"n) # 필요한 개수로 링크 줄이기 # Small block 영역에 ssize 만큼씩 Overwrite",
"링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD 링크 t_link = free_link[:add_num]",
"Block 개수 for no in special_no: seg = no / bb_num off =",
"구하기 (내장) # --------------------------------------------------------------------- def __get_pps_path(self, node=0, prefix=''): if node == 0: pps_name",
"in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] sbd",
"OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 = pics.read() o.close()",
"root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no != 0xffffffff and next_no",
"줄이기 # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기",
"i in t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock =",
"self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) #",
"모은다 bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd",
"Error('Input data is invalid.') # 수정 모드 self.write_mode = write_mode # OLE 주요",
"enumerate(self.pps): if pps['Prev'] == node or pps['Next'] == node or pps['Dir'] == node:",
"if self.__valid_pps_tree() is False: return False if self.verbose: print kavutil.vprint('Property Storage') ''' print",
"압축 파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle: #",
">> 6) & 0x3f)) och.append(ch) ret_str = '' for ch in och: ret_str",
"연결된 링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의 SBD link가",
"%8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print",
"self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크",
"0xffffffff and next_no == 0xffffffff: # 단일 노드 # 1. 0xffffffff 노드 값을",
"+ '%2d %-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) #",
"= kavutil.get_uint32(self.bbd, i * 4) self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) #",
"(1 if (len(data) % self.ssize) else 0) t_data = data + ('\\x00' *",
"'\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree",
"= {'compress': (val & 0x1 == 0x1), 'encrypt': (val & 0x2 == 0x2),",
"# --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다. # --------------------------------------------------------------------- def get_bbd_list_index_to_offset(buf,",
"def __modify_bbd(self, bbd): self.bbd = bbd # 체크 !!! bbd_list_array, _, _, _",
"self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev'])",
"next_no == 0xffffffff: # 단일 노드 # 1. 0xffffffff 노드 값을 root로 보낸다.",
"# 파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data",
"SBD 링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else: #",
"num): root = self.pps[0] r_size = root['Size'] r_no = root['Start'] # SBD 링크를",
": {파일 포맷 분석 정보} or None # --------------------------------------------------------------------- def format(self, filehandle, filename,",
"# OLE 주요 데이터 self.mm = None self.bsize = None self.ssize = None",
"엔진을 초기화 한다. # 인력값 : plugins_path - 플러그인 엔진의 위치 # verbose",
"0: if next_b in ret: # 이미 링크가 존재하면 종료 break ret.append(next_b) except",
"t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # ---------------------------------------------------------------------",
"# t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat)",
"self.__full_list: if p['Name'] == name: no = p['Node'] break else: no = -1",
"+ (1 if (add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block",
"첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e)",
"# --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id - 압축",
"최종 결과의 BBD 링크 t_link = free_link[:add_num] # BBD에 링크 연결하기 for i",
"SBD -> BBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n =",
"== 0x2), 'viewtext': (val & 0x4 == 0x4)} except Error: pass o.close() return",
"새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에",
"가진 root를 찾기 for i, pps in enumerate(self.pps): if pps['Prev'] == node or",
"del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] #",
"fileformat: try: # OLE Stream 목록 추출하기 o = self.__get_handle(filename) for name in",
"plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값 : plugins_path - 플러그인 엔진의",
"1) * self.bsize # t_data의 위치 self.mm = self.mm[:off] + t_data + self.mm[off",
"--------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진",
"# ----------------------------------------------------------------- for p in self.__full_list: if p['Name'] == name: no = p['Node']",
": filename - 파일 이름 # 리턴값 : 압축 파일 핸들 # ---------------------------------------------------------------------",
"t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) * self.bsize) + (off",
"while loop: if e == num_list.pop(0): break end = e break else: for",
"self.ssize = None self.bbd_list_array = None self.bbd = None self.bbd_fat = {} self.sbd",
"/ 8] + 1) * self.bsize off += (n % 8) * self.ssize",
"self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support",
"나누어 bsize 단위가 아니면 맞춘다. n = len(sbd) % self.bsize if n: t",
"= \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석 처리 if",
"root로 보낸다. t_no = 0xffffffff # root 노드를 수정한다. pps = self.pps[root_no] if",
"--------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의 SBD link가 수정 됨) #",
"d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d",
"init(self, plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값 : plugins_path - 플러그인",
"self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if",
"= OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close()",
"self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if t: self.init(t) #",
"= OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의 수가 너무 많음",
"- last_no if n >= num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면",
"storages: ret.append(p['Name']) else: pass return ret # --------------------------------------------------------------------- # 스트림이 존재하는가? # ---------------------------------------------------------------------",
"for i in range(len(self.root) / 0x80): p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size",
"num_list.pop(0) end = e return start, end def read(self): pps = self.parent.pps[self.node] sb",
"verbose # 디버깅용 self.isfile = False # 파일로 접근 중인가? if isinstance(input_data, types.StringType):",
"self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의",
"됨) # old_link : 기존 BBD link # add_num : 추가 BBD link",
"for n in list_array: div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n /",
"0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff # root 노드를 수정한다. pps",
"+ ('\\x00' * ((n * self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에",
"pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose",
"Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # ---------------------------------------------------------------------",
"악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle,",
"--------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라 데이터 쓰기 (내장) # ---------------------------------------------------------------------",
"parent self.node = node self.read_size = 0 self.fat = None # print self.parent.verbose",
"링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small",
"print '[-] filename :', rname, len(buf) # print '[-] rname :', o.write_stream(a_name, buf)",
"verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정, Next 수정 o.close() ''' o =",
"# 잔여 개수 체크하기 last_no = (size / self.bsize) - 2 # 실제",
"list_array = get_block_link(sb, self.fat) data = '' if size >= 0x1000: t_list =",
"self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue except IndexError: if (x &",
"= struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data + self.mm[0x4C:] else: data",
"cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158')",
"'%d' % self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200 !=",
"/ ((self.bsize - 4) / 4)) + (1 if (t_num % ((self.bsize -",
"struct.pack('<L', pps_prev) + buf[t_off + 4:] if pps_next is not None: t_off =",
"in enumerate(self.pps): if pps['Prev'] == node or pps['Next'] == node or pps['Dir'] ==",
"링크 # num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self,",
"in self.__full_list: if p['Name'] == name: return True else: return False # ---------------------------------------------------------------------",
"== 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 SBD",
"Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no + 1)",
"== 0x00: name = '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] =",
"o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics",
"블록 수 추가하여 링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size +",
"is invalid.') return Stream(self, no) # --------------------------------------------------------------------- # 스트림의 데이터를 덮어쓴다. # ---------------------------------------------------------------------",
"추가 요청 # t_link = get_block_link(r_no, self.bbd) # 이전 Small Block의 링크를 구함",
"self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체",
"None: t_off = off + 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) +",
"# PPS Tree 검증 if self.__valid_pps_tree() is False: return False if self.verbose: print",
"생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # ---------------------------------------------------------------------",
"return # 추가할 필요 없음 # 잔여 개수 체크하기 last_no = (size /",
"get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks,",
"self.mm[:off] + data + self.mm[off + self.bsize:] if __name__ == '__main__': # import",
"open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock =",
"/ (self.bsize / 4)) + (1 if (add_num % (self.bsize / 4)) else",
"# 압축 파일의 핸들을 얻는다. # 입력값 : filename - 파일 이름 #",
"struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD 처리하기 if total_bbd_num >",
"# o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의 수가",
"len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기",
"= pps['Next'] return no def delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev']",
"t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support :",
"존재 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no elif prev_no",
"for file_info in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname):",
"t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1] sbd =",
"0xffffffff: # Prev만 존재 # 1. prev 노드 값을 root로 보낸다. t_no =",
"# --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self):",
"data + ('\\x00' * ((n * self.bsize) - len(data))) # 여분의 크기를 data",
"결과의 BBD 링크 t_link = free_link[:add_num] # BBD에 링크 연결하기 for i in",
"'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80)",
"= True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff",
"(1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기",
"2. prev 노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no =",
"'' # add_data = '' add_num = num - n # 추가해야 할",
"bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array",
"== del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은",
"블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b,",
"verbose=False): self.verbose = verbose # 디버깅용 self.isfile = False # 파일로 접근 중인가?",
"self.bsize) self.sbd_fat = {} for i in range(len(self.sbd) / 4): n = kavutil.get_uint32(self.sbd,",
"+= 1 # 최종 조합 self.mm += x_data + b_data + add_data +",
"# size : 설정 크기 # start : 시작 링크 # --------------------------------------------------------------------- def",
"size >= 0x1000: self.read_size = self.parent.bsize self.fat = self.parent.bbd_fat else: self.read_size = self.parent.ssize",
"(len(data) % self.ssize) else 0) t_data = data + ('\\x00' * ((n *",
"추가할 필요 없음 else: # 여유분이 부족함. 따라서 Root를 늘려야 함 size =",
"org_link_list, num_link): if len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link = []",
"= '_\\x00' + pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else:",
"sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in sbd_list_array: self.sbd +=",
"내용 or None # --------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None",
"추가 요청한다. (원본 이미지의 BBD link가 수정 됨) # old_link : 기존 BBD",
"if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None,",
"or pps['Dir'] == node: return i def __get_max_node(self, node): # 특정 노드의 Max",
"as fp: buf = fp.read() # print '[-] filename :', rname, len(buf) #",
"# 수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link,",
"이전 링크가 없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm)",
"format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다. # 입력값 : filehandle -",
"= bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array =",
"for i, no in enumerate(sbd_link) if (no == 0xffffffff and i < r_size",
"(t_idx / ((bsize / 4) - 1)) + (1 if (t_idx % ((bsize",
"org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size = self.bsize fat",
"if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link # 연결된",
"sbd_link = [] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지",
"and next_no != 0xffffffff: # Next만 존재 # 1. next 노드 값을 root로",
"target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에 대한",
"크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big block",
"# --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if",
"끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no += 1 # END of",
"1) * 4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd)",
"= self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지",
"= list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서",
"node or pps['Dir'] == node: return i def __get_max_node(self, node): # 특정 노드의",
"링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num = 0",
"시작 링크 # --------------------------------------------------------------------- def __set_pps_header(self, node, size=None, start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False):",
"0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 # XBBD를",
"in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose:",
"= write_mode # OLE 주요 데이터 self.mm = None self.bsize = None self.ssize",
"self.bsize) + (off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:] #",
"kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big",
"file_infos) # 입력값 : arc_engine_id - 압축 가능 엔진 ID # arc_name -",
"small_block def __get_root_node(self, node): # 해당 정보를 가진 root를 찾기 for i, pps",
"ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = []",
"* ((n * self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num",
"old_link + free_link[:add_num] # 최종 결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num]",
"self.bsize * add_num) # 추가해야 할 BBD list 개수는 한개의 BBD에는 bsize /",
"if p['Prev'] == 0xffffffff else '%4d ' % p['Prev'] t += ' -",
"if pps_dir is not None: t_off = off + 0x4C buf = buf[:t_off]",
"for i, no in enumerate(bbd_link) if (no == 0xffffffff and i < size",
"off = ((node % 4) * 0x80) if del_info and off == 0x180:",
"self.bbd_fat) self.root_list_array = root_list_array self.root = '' for no in root_list_array: self.root +=",
"실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인 엔진 초기화 self.handle =",
"원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 # --------------------------------------------------------------------- #",
"핸들 # filename - 파일 이름 # filename_ex - 압축 파일 내부 파일",
"+ self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라",
"설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] =",
"(t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된",
"여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기",
"add_num) # 추가해야 할 BBD list 개수는 한개의 BBD에는 bsize / 4 개수만큼",
"파일 이름 # fileformat - 파일 포맷 분석 정보 # 리턴값 : [[압축",
"0xfffffffe: break if len(ret) % 10000 == 0: if next_b in ret: #",
"파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile =",
"if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 BBD 링크 t_link",
"경우 더이상 분석하지 않기 위해 처리 f = [] if len(self.pps) == 0:",
"kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d'",
"= root_list_array self.small_block = small_block def __get_root_node(self, node): # 해당 정보를 가진 root를",
"2: # 유효한 PPS에 대한 삭제인지 확인 if reset_stream: size = target_pps['Size'] t",
"준비한다 if len(data) >= 0x1000: # BBD를 사용한다. if org_size >= 0x1000: #",
"self.bsize = None self.ssize = None self.bbd_list_array = None self.bbd = None self.bbd_fat",
"data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link =",
"print ' ' + ('-' * 74) for p in self.pps: print '",
"org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list",
"print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD",
"for x in num_list: if e + 1 == x: e = x",
"1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off + self.bsize:] if",
"off += (n % div_n) * self.parent.ssize data += self.parent.mm[off:off + self.read_size] if",
"= get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif",
"Blocks', '%d' % sbd_startblock) kavutil.vprint(None, 'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print",
"BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD link",
"4:] if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link",
"%-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size')",
"target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지 확인 if reset_stream: size =",
"num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link):",
"= zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d =",
"# 추가해야 할 블록 수 add_data = ('\\x00' * self.bsize * add_num) #",
"정보} or None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret = {}",
"8) * self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i + 1) *",
"보낸다. t_no = 0xffffffff # root 노드를 수정한다. pps = self.pps[root_no] if pps['Prev']",
"+= get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i in range(len(self.bbd) / 4):",
"in num_list: if e + 1 == x: e = x loop =",
"# 버전 info['title'] = 'OLE Library' # 엔진 설명 info['kmd_name'] = 'ole' #",
"0xffffffff and i < r_size / self.ssize)] if len(free_link) >= num: # 여유분이",
"Dec, Inc가 의미 없음 n = (len(data) / self.bsize) + (1 if (len(data)",
"== 0xffffffff: # Prev만 존재 # 1. prev 노드 값을 root로 보낸다. t_no",
"0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if",
"크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link",
"* self.bsize:(i + 1) * self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- #",
"+ 1) * self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE 영역의",
"self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Big block 영역에 bsize 만큼씩",
"- 0x483F # only one charecter can be decoded ch = MsiBase64Encode(ch -",
"-> SBD (Inc)') # 작업 완료 n = (len(data) / self.ssize) + (1",
"- 4) / 4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야",
"num_of_bbd_blocks: return False self.bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array,",
"# --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의 SBD link가 수정 됨)",
"+ pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] =",
"# 이미 분석한 노드의 경우 더이상 분석하지 않기 위해 처리 f = []",
"return buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf,",
"종료 break else: # 항상 오른쪽 노드가 큰 값임 no = pps['Next'] return",
"# raise error('Not Support : SBD -> SBD (Inc)') # 작업 완료 n",
"OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'):",
"0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list : 기존 Small block",
"for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize)",
"엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. # 리턴값",
"ch = MsiBase64Encode(ch - 0x4800) if not ch: continue else: # 0x3800 -",
"del_info and off == 0x180: buf = buf[:off] + '\\x00' * 0x80 elif",
"link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num < 0: return",
"# 추가할 필요 없음 else: # 여유분이 부족함. 따라서 Root를 늘려야 함 size",
"# --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num < 0: return [] #",
"invalid.') # 수정 모드 self.write_mode = write_mode # OLE 주요 데이터 self.mm =",
"self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS 전체 경로",
"입력값 : arc_engine_id - 압축 엔진 ID # arc_name - 압축 파일 #",
"old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link",
"= ow.write(no, '\\x00' * size) # 모든 데이터를 0으로 Wipe t = ow.delete(no)",
"내부 링크 구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat =",
"목록을 얻는다. # 입력값 : filename - 파일 이름 # fileformat - 파일",
"!= 2 and len(self.pps[x]['Name']) == 0: continue except IndexError: if (x & 0x90900000)",
"except IOError: # print file_info.get_filename_in_archive() pass o.close() # zfile.close() return True return False",
"print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass # ----------------------------------------------------------------- for",
"= {} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i *",
"ch >= 0x4800: # 0x4800 - 0x483F # only one charecter can be",
"org_size >= len(data): # raise error('Not Support : SBD -> SBD (Dec)') #",
"함 size = num * self.ssize # 추가해야 할 용량 add_big_num = (size",
"+ buf[t_off + 4:] self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm, n,",
"정보를 알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인 엔진 정보 # ---------------------------------------------------------------------",
"/ self.bsize) * self.bsize # 파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모",
"0xffffffff: # 단일 노드 # 1. 0xffffffff 노드 값을 root로 보낸다. t_no =",
"[] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link",
"num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) #",
"바꾸기 t_link = [] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i *",
"# self.init(self.mm) # return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd,",
"p['Node'] break else: no = -1 if no == -1: raise Error('PPS name(%s)",
"= self.pps[0] r_size = root['Size'] r_no = root['Start'] # SBD 링크를 생성한다. sbd_link",
"블록 self.mm += attach_data else: special_no = [] # 특수 목적의 Big Block",
"free_link = [i for i, no in enumerate(bbd_link) if (no == 0xffffffff)] if",
"self.ssize) print kavutil.HexDump().Buffer(self.mm, 0, 0x60) print if self.bsize % 0x200 != 0 or",
"+ add_data + attach_data # 특수 블록에 BBD list도 추가 special_no += bbd_no",
"buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:] if pps_prev is",
"# num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list,",
"= self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link = []",
"= pps self.bsize = bsize self.ssize = ssize self.bbd = bbd self.bbd_fat =",
"# 입력값 : filehandle - 파일 핸들 # filename - 파일 이름 #",
"# --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = [] # 검사 대상 정보를",
"self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) t = ow.write(no, data) if t:",
"verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15) d =",
"self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self, node): #",
"= ((node % 4) * 0x80) if del_info and off == 0x180: buf",
"= [i for i, no in enumerate(sbd_link) if (no == 0xffffffff)] if old_link:",
"엔진 설명 info['kmd_name'] = 'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK #",
"o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except IOError: #",
"t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize) next_b = kavutil.get_uint32(t_data,",
"않는 BBD 링크를 찾는다. free_link = [i for i, no in enumerate(bbd_link) if",
"링크 수집하기 sbd = self.sbd for no in t_link: sbd = sbd[:no*4] +",
"if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print",
"in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd + i) # print hex(off) self.mm =",
"= off + 0x78 buf = buf[:t_off] + struct.pack('<L', size) + buf[t_off +",
"= None self.root_list_array = None self.exploit = [] # 취약점 존재 여부 #",
"xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다.",
"free_link[:add_num] # 최종 결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에",
"/ 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i",
"# --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return 0 # 플러그인 엔진",
"--------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename): try: buf = open(filename,",
"div_n] + 1) * self.parent.bsize off += (n % div_n) * self.parent.ssize data",
"= get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = '' for no in root_list_array:",
"추가한다. # num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num):",
"'\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >= 0x1000: #",
"ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록 수정 self.__set_pps_header(no,",
"bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list",
"이미지의 SBD link가 수정 됨) # old_link : 기존 SBD link # add_num",
"in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i in",
"존재하는가? zfile = self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일",
"fat = bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe: ret.append(next_b) while True:",
"* self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의 BBD link가",
"ret['ff_attach'] = fileformat # HWP 인가? o = OleFile(filename) try: pics = o.openstream('FileHeader')",
"= free_link[:add_num] # BBD에 링크 연결하기 for i in range(len(t_link)-1): no = t_link[i+1]",
"포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat: try: # OLE Stream",
"org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다.",
"같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize *",
"e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off =",
"o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의 수가 너무",
"node를 찾기 no = node while True: pps = self.pps[no] if pps['Next'] ==",
"hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small",
"i*4)) # 사용하지 않는 BBD 링크를 찾는다. free_link = [i for i, no",
"< len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.ssize)",
"no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize",
"# 최종 결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에 링크",
"' if p['Prev'] == 0xffffffff else '%4d ' % p['Prev'] t += '",
"self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의 BBD link가 수정",
"i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat)",
"찾기 no = node while True: pps = self.pps[no] if pps['Next'] == 0xffffffff:",
"값을 가진 node를 찾기 no = node while True: pps = self.pps[no] if",
"파일 열기 self.handle[filename] = zfile return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat)",
"raise error('Not Support : SBD -> SBD (Dec)') # 지원 완료 n =",
"arc_name - 압축 파일 # fname_in_arc - 압축 해제할 파일 이름 # 리턴값",
"= off + 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off +",
"if old_num_bbd + b_num > 109: t_num = (old_num_bbd + b_num - 109)",
"self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석 처리 if self.verbose: print if",
"'SB', 'Size') print ' ' + ('-' * 74) for p in self.pps:",
"= len(t_data) / self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수",
"링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다. (원본 이미지의 SBD link가 수정",
">= 0x1000: t_list = list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) # 연속된",
"성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전,",
"print ' %-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s' % ('No', 'Name',",
"end = None if not start: start = num_list.pop(0) e = start loop",
"가능한 악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의",
"사용 # raise error('Not Support : BBD -> SBD') # 섹터가 변화는 것은",
"if target_pps['Valid'] and target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지 확인 if",
"가능 엔진 ID # arc_name - 최종적으로 압축될 압축 파일 이름 # file_infos",
"# 필요한 블록 수 추가하기 # SBD 링크를 처음 생성하므로 이전 링크가 없다.",
"# sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array for",
"kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한",
"# t = get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t = kavutil.get_uint32(t,",
"/ 4)) else 0) if old_b_num == b_num: break else: old_b_num = b_num",
"in ret: # 이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError: break return",
"raise error('Not Support : BBD -> BBD (Dec)') # 개발 완료 n =",
"zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id",
"블록 처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num",
"경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의",
"Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD",
"num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if idx <= 109: return 0x4c",
">= num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 else: #",
": 기존 BBD link # add_num : 추가 BBD link 개수 # ---------------------------------------------------------------------",
"# zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive()",
"bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block",
"in enumerate(t_link): off = (self.small_block[n / 8] + 1) * self.bsize off +=",
"= '' for ch in och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE',",
"kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기",
"num_of_bbd_blocks * 109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm,",
"= kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd =",
"sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD",
"enumerate(bbd_link) if (no == 0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] #",
"/ self.bsize) + (1 if (size % self.bsize) else 0) # 추가해야 할",
"않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩",
"== 0xffffffff else '%4d ' % p['Next'] t += ' - ' if",
"# --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스 # -----------------------------------------------------------------",
"False: # 유효한 Tree가 아니면 다음 continue t = '' t += '",
"open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link # 연결된 링크 #",
"kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of",
"Author: <NAME>(<EMAIL>) import os import sys import struct import types import kernel import",
"= verbose # 디버깅용 self.isfile = False # 파일로 접근 중인가? if isinstance(input_data,",
"* self.bsize * add_num) # 추가해야 할 BBD list 개수는 한개의 BBD에는 bsize",
"1)) else 0) off = (t_idx % ((bsize / 4) - 1)) next_b",
"self.bsize:] if __name__ == '__main__': # import zlib # o = OleFile('normal.hwp', write_mode=True,",
"'Num of SBD Blocks', '%d' % num_of_sbd_blocks) print kavutil.HexDump().Buffer(self.sbd, 0, 0x80) # PPS",
"# 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks =",
"case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d",
"prev_no != 0xffffffff and next_no == 0xffffffff: # Prev만 존재 # 1. prev",
"range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i *",
"kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s'",
"# 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # ---------------------------------------------------------------------",
"t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에",
"kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print",
"kavutil.get_uint32(self.sbd, i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start",
"print repr(t) # t = kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에",
"self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기",
"bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) * bsize",
"'\\x00' * self.bsize * num # 실제 필요한 데이터 블록 self.mm += attach_data",
"struct.pack('<L', start) + buf[t_off + 4:] if pps_prev is not None: t_off =",
"수정한다. pps = self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] ==",
"for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm, next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4)",
"공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat # HWP",
"= len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize)",
"elif prev_no != 0xffffffff and next_no == 0xffffffff: # Prev만 존재 # 1.",
"filename - 파일 이름 # filename_ex - 압축 파일 내부 파일 이름 #",
"!= 0 or self.ssize != 0x40: # 이상 파일 정보 처리 return False",
"'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기 # --------------------------------------------------------------------- def",
"링크를 찾는다. free_link = [i for i, no in enumerate(bbd_link) if (no ==",
"(Dec)') # 지원 완료 n = (len(data) / self.ssize) + (1 if (len(data)",
"Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. # num :",
"num_list.pop(0) e = start loop = False for x in num_list: if e",
"+ 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n,",
"= bbd[i * self.bsize:(i + 1) * self.bsize] off = (no + 1)",
"= (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진",
"if del_info and off == 0x180: buf = buf[:off] + '\\x00' * 0x80",
"sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >=",
"# SBD 링크를 생성한다. sbd_link = [] for i in range(len(self.sbd) / 4):",
"블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록 last_no += 1 #",
"# 1. prev 노드 값을 root로 보낸다. t_no = prev_no elif prev_no ==",
"in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name,",
"/ self.ssize)] if len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할",
"0x80) # PPS 읽기 self.pps = [] for i in range(len(self.root) / 0x80):",
"SBD 사용 # raise error('Not Support : SBD -> BBD') # 섹터가 변화는",
"num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for",
"next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print",
"(n + 1) * self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i +",
"enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1) * self.bsize self.mm =",
"것은 Dec, Inc가 의미 없음 n = (len(data) / self.bsize) + (1 if",
"range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link]",
"if n: t = self.bsize - n sbd += '\\xff' * t if",
"Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'],",
"# --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array",
"# 인력값 : plugins_path - 플러그인 엔진의 위치 # verbose - 디버그 모드",
"# 플러그인 엔진 종료 return 0 # 플러그인 엔진 종료 성공 # ---------------------------------------------------------------------",
"이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩",
"self.pps: if p['Valid'] is False: # 유효한 Tree가 아니면 다음 continue t =",
"(val & 0x2 == 0x2), 'viewtext': (val & 0x4 == 0x4)} except Error:",
"Max 값을 가진 node를 찾기 no = node while True: pps = self.pps[no]",
"OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self,",
"self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True",
"kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd,",
"제거 attach_data = self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터 # 전체",
"0x3800 <= ch <= 0x4840: if ch >= 0x4800: # 0x4800 - 0x483F",
"if (x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else:",
"if e + 1 == x: e = x loop = True continue",
"--------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block",
"- 파일 이름 # filename_ex - 압축 파일 내부 파일 이름 # 리턴값",
"# 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기",
"%8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'], p['Start'], p['Size']) '''",
"# __get_handle(self, filename) # 압축 파일의 핸들을 얻는다. # 입력값 : filename -",
"# SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = '' for i in t_link:",
"더이상 오른쪽이 없으면 탐색 종료 break else: # 항상 오른쪽 노드가 큰 값임",
"self.sbd for no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd)",
"& 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347",
"buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를",
"coding:utf-8 -*- # Author: <NAME>(<EMAIL>) import os import sys import struct import types",
"데이터를 덮어쓴다. # --------------------------------------------------------------------- def write_stream(self, name, data): for p in self.__full_list: if",
"return False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is False: return False",
"0x4)} except Error: pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename) #",
"self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call')",
"os.path.exists(rname): with open(rname, 'rb') as fp: buf = fp.read() # print '[-] filename",
"in range(len(t_link)-1): no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] bbd",
"'' name = prefix + pps_name else: if self.pps[node]['Valid'] is False: # 유효한",
"buf) # zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except IOError: # print",
"' %-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type',",
"0xffffffff and next_no != 0xffffffff: # 양쪽 모두 노트가 존재함 # 1. prev",
"# --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd,",
"{ # 포맷 정보를 담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize }",
"_, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd = '' for i in",
"줄인다 # org_link_list : 기존 Small block 링크 # num_link : 필요로 하는",
"번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data = '' # b_data = ''",
"self.pps = [] for i in range(len(self.root) / 0x80): p = {} pps",
"self.bsize) - 2 # 실제 마지막 Big Block 번호 n = (len(self.bbd) /",
"d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d)",
"1. prev 노드 값을 root로 보낸다. t_no = prev_no elif prev_no == 0xffffffff",
"self.mm += x_data + b_data + add_data + attach_data # 특수 블록에 BBD",
"= zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 # -------------------------------------------------------------------------",
"continue t = '' t += ' - ' if p['Prev'] == 0xffffffff",
"call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list : 기존 Small block",
"x in num_list: if e + 1 == x: e = x loop",
"* 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no + 1) * 4:] if self.verbose: open('bbd.dm3',",
"0x1e) rsize = (fsize / bsize) * bsize if fsize > rsize: fileformat",
"-1 if idx <= 109: return 0x4c + (idx * 4) else: t_idx",
"검증 if self.__valid_pps_tree() is False: return False if self.verbose: print kavutil.vprint('Property Storage') '''",
"--------------------------------------------------------------------- # __get_handle(self, filename) # 압축 파일의 핸들을 얻는다. # 입력값 : filename",
"self.pps: print ' ' + '%2d %-23s %d %8X %8X %8X %8X %8d'",
"xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어 주석 처리",
"buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:] if pps_prev is not None:",
"root_list_array, small_block, verbose): self.verbose = verbose self.mm = mm self.pps = pps self.bsize",
"break else: # 항상 오른쪽 노드가 큰 값임 no = pps['Next'] return no",
"추가하기 bbd_no = [] b_data = '\\xff' * self.bsize * b_num for i",
"PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : BBD ->",
"[] och = [] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i *",
"= -1 if no == -1: raise Error('PPS name is invalid.') return Stream(self,",
"__init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose",
"= get_block_link(sb, self.fat) data = '' if size >= 0x1000: t_list = list(list_array)",
"기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no)",
"else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list :",
"109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data",
"t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link)",
"+ 1 == x: e = x loop = True continue else: while",
"wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch: if 0x3800 <= ch <=",
"link가 수정 됨) # old_link : 기존 SBD link # add_num : 추가",
"org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size",
"PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장) # node : PPS 인덱스",
"open(input_data, 'rb') buf = self.fp.read() else: buf = input_data else: raise Error('Input data",
"SBD 버퍼로 바꾸기 self.sbd = '' for i in t_link: self.sbd += struct.pack('<L',",
"= file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname, 'rb') as fp: buf = fp.read()",
"파일 이름 # filename_ex - 압축 파일 내부 파일 이름 # 리턴값 :",
"next_b in ret: # 이미 링크가 존재하면 종료 break ret.append(next_b) except KeyError: break",
"# SBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num] #",
"self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call')",
"없음 return False while len(f): x = f.pop(0) try: if self.pps[x]['Type'] != 1",
"i < r_size / self.ssize)] if len(free_link) >= num: # 여유분이 충분히 존재함...",
"= n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) #",
"# 리턴값 : 0 - 성공, 0 이외의 값 - 실패 # ---------------------------------------------------------------------",
"/ (self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm,",
"= (self.bsize/4) # 한개의 BBD list 블록에 들어갈 수 있는 Big Block 개수",
"% ((bsize / 4) - 1)) else 0) off = (t_idx % ((bsize",
"len(data) == self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return True return",
"no def delete(self, del_no): del_pps = self.pps[del_no] prev_no = del_pps['Prev'] next_no = del_pps['Next']",
"ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return ord(ct[x]) def DecodeStreamName(name):",
"%-23s %d %8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'],",
"buf[:off] + '\\x00' * 0x80 elif del_info: buf = buf[:off] + '\\x00' *",
"- 압축 해제할 파일 이름 # 리턴값 : 압축 해제된 내용 or None",
"+ 4:] if pps_dir is not None: t_off = off + 0x4C buf",
"for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n",
"4)) + (1 if (t_num % ((self.bsize - 4) / 4)) else 0)",
"삭제 if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스",
": https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28'",
"0x4840: if ch >= 0x4800: # 0x4800 - 0x483F # only one charecter",
"Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size = root['Size']",
"# self.mm에 BBD 적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array",
"# raise error('Not Support : SBD -> BBD') # 섹터가 변화는 것은 Dec,",
"= None self.bsize = None self.ssize = None self.bbd_list_array = None self.bbd =",
"d = d + d o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o",
"!= 0xffffffff: # Next만 존재 # 1. next 노드 값을 root로 보낸다. t_no",
"0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD List와 XBBD",
"악성코드 리스트 vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드",
"root['Start'] # SBD 링크를 생성한다. sbd_link = [] for i in range(len(self.sbd) /",
"= get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4],",
"p['Valid'] = False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고",
"= [] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for",
"o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) # d2 =",
"add_num): if add_num < 0: return [] # 전체 BBD 링크를 구한다 bbd_list_array,",
"6) & 0x3f)) och.append(ch) ret_str = '' for ch in och: ret_str +=",
"# 단일 노드 # 1. 0xffffffff 노드 값을 root로 보낸다. t_no = 0xffffffff",
"sbd_list_array for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no +",
"내부의 파일 목록을 얻는다. # 입력값 : filename - 파일 이름 # fileformat",
"node while True: pps = self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이",
"# PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 # raise",
"no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. # num",
"delete(self, name, delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name'] == name: no",
"num): size = (len(self.mm) / self.bsize) * self.bsize # 파일 크기 self.mm =",
"없다. t_link = self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd =",
"end = e break else: for i in range(len(num_list)): num_list.pop(0) end = e",
"sbd_link = [] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) # 사용하지",
"bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no",
"def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) #",
"if self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link #",
"self.bsize - n sbd += '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd)",
"struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:] no",
"XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm,",
"# 검사 대상 정보를 모두 가짐 # 미리 분석된 파일 포맷중에 OLE 파일",
"개수만큼 추가한다. # num : 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self,",
"+= 1 # XBBD 처리하기 if total_bbd_num > 109: t_num = (total_bbd_num -",
"생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if",
"struct.pack('<L', pps_next) + buf[t_off + 4:] if pps_dir is not None: t_off =",
"bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array = root_list_array self.small_block = small_block",
"# print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) # print repr(t) #",
"self.bsize) + (1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록",
"(1 if (add_num % (self.bsize / 4)) else 0) if old_b_num == b_num:",
"f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0: # 정상적인 PPS가 없음",
"없다. t_link = self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data,",
"__get_handle(self, filename) # 압축 파일의 핸들을 얻는다. # 입력값 : filename - 파일",
"one charecter can be decoded ch = MsiBase64Encode(ch - 0x4800) if not ch:",
"링크 삭제 if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream",
"'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False # --------------------------------------------------------------------- # OleFile 클래스",
"# 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream:",
"0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에 존재하는",
"self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s %-8s %-8s %-8s",
"name) # if self.pps[node]['Type'] != 5: # Stream만 저장 p = {'Node': node,",
"'wb').write(sbd) self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link # 연결된 링크 # ---------------------------------------------------------------------",
"디버깅용 self.isfile = False # 파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data):",
"def __get_max_node(self, node): # 특정 노드의 Max 값을 가진 node를 찾기 no =",
"arc_name, fname_in_arc): data = None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp",
"self.mm = None self.bsize = None self.ssize = None self.bbd_list_array = None self.bbd",
"2 and streams: ret.append(p['Name']) elif p['Type'] == 1 and storages: ret.append(p['Name']) else: pass",
"self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size = self.bsize",
"= self.sbd # org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해",
"= get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i *",
"0x4800 - 0x483F # only one charecter can be decoded ch = MsiBase64Encode(ch",
"else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm,",
"분석 def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- #",
"출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를",
"선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist #",
"+ '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size >= 0x1000:",
"self.verbose: open('bbd.dm3', 'wb').write(bbd) # 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된",
"' % p['Prev'] t += ' - ' if p['Next'] == 0xffffffff else",
"데이터 블록 self.mm += attach_data else: special_no = [] # 특수 목적의 Big",
"add_num < 0: return [] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) #",
"= [] self.parse() # OLE 파일을 분석 def close(self): if self.isfile: self.fp.close() if",
"0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if idx",
"# 해당 정보를 가진 root를 찾기 for i, pps in enumerate(self.pps): if pps['Prev']",
"이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD",
"성공 여부 (True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if",
"이름 # fileformat - 파일 포맷 분석 정보 # 리턴값 : [[압축 엔진",
"size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd)",
"# OLE Stream 목록 추출하기 o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole',",
"in t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd =",
"start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) #",
"verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block =",
"if len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link = [] for i",
"/ bsize) * bsize if fsize > rsize: fileformat = { # 포맷",
"제작자 info['version'] = '1.1' # 버전 info['title'] = 'OLE Library' # 엔진 설명",
"d) o.close() ''' ''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics =",
"len(buf) # print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: #",
"= ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps,",
"in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is",
"break if len(ret) % 10000 == 0: if next_b in ret: # 이미",
"# 분석된 PPS가 없으면 종료 return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type']",
"[] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t =",
"- 압축 엔진 ID # arc_name - 압축 파일 # fname_in_arc - 압축",
"b_num: break else: old_b_num = b_num total_bbd_num = old_num_bbd + b_num # 전체",
"+ free_link[:add_num] # SBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link =",
"is not None: t_off = off + 0x44 buf = buf[:t_off] + struct.pack('<L',",
"-> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain",
"t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: # 출력시 이름이 깨질",
"= (size / self.bsize) - 2 # 실제 마지막 Big Block 번호 n",
"None return ord(ct[x]) def DecodeStreamName(name): wch = [] och = [] for i",
"= kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기",
"x loop = True continue else: while loop: if e == num_list.pop(0): break",
"할 XBBD 개수 add_num += x_num b_num = (add_num / (self.bsize / 4))",
"0xffffffff and next_no != 0xffffffff: # Next만 존재 # 1. next 노드 값을",
"+ 1) * self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i + 1)",
"이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd =",
"+ add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의",
"class Stream: def __init__(self, parent, node): self.parent = parent self.node = node self.read_size",
"인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63:",
"= kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in",
"size >= 0x1000: t_list = list(list_array) while len(t_list): s, e = self.get_liner_value(t_list) #",
"# raise error('Not Support : BBD -> SBD') # 섹터가 변화는 것은 Dec,",
"error('Not Support : SBD -> SBD (Inc)') # 작업 완료 n = (len(data)",
"# add_data = '' add_num = num - n # 추가해야 할 블록",
"{} mm = filehandle # OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole']",
"loop = True continue else: while loop: if e == num_list.pop(0): break end",
"del_pps['Prev'] next_no = del_pps['Next'] dir_no = del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no)",
"= [] # 취약점 존재 여부 # 임시 변수 self.__deep = None self.__full_list",
"self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return True # --------------------------------------------------------------------- # PPS Tree의",
"전체 경로 구하기 (스트림만 출력) # --------------------------------------------------------------------- def listdir(self, streams=True, storages=False): ret =",
"= kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks:",
"file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id,",
"= -1 if no == -1: raise Error('PPS name(%s) is invalid.' % name)",
"'%4d ' % p['Next'] t += ' - ' if p['Dir'] == 0xffffffff",
"취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] =",
"# Small Block을 주어진 개수만큼 추가한다. # num : 추가할 Big Block 개수",
"압축 파일 내부 파일 이름 # 리턴값 : {파일 포맷 분석 정보} or",
"kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD List와",
"num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히 많은 데이터가 출력되어",
"'' add_num = num - n # 추가해야 할 블록 수 add_data =",
"# 스트림을 연다 # --------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용",
"data = fp.read() except: data = None return data # --------------------------------------------------------------------- # arcclose(self)",
"x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록",
"0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array",
"bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf,",
"SBD 링크를 생성한다. sbd_link = [] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd,",
"is not None: t_off = off + 0x48 buf = buf[:t_off] + struct.pack('<L',",
"= old_link[-1:] + free_link[:add_num] # SBD에 링크 연결하기 else: # 이전 링크가 없다면...",
"except KeyError: break return ret # --------------------------------------------------------------------- # OLE 블록 읽기 # ---------------------------------------------------------------------",
"pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no,",
"seg*4)) t_no = kavutil.get_uint32(bbd_list_array, seg*4) t_off = ((t_no + 1) * self.bsize) +",
"kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize])",
"bbd += get_bblock(self.mm, no, self.bsize) bbd_link = [] for i in range(len(bbd) /",
"1) * 4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다. n = len(sbd)",
"정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000:",
"# 특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data =",
"- 압축 파일 내부 파일 이름 # 리턴값 : {파일 포맷 분석 정보}",
"+= (n % 8) * self.ssize self.mm = self.mm[:off] + t_data[i * self.ssize:(i",
"번호 n = (len(self.bbd) / 4 - 1) - last_no if n >=",
"0xffffffff)] if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 BBD 링크",
"+ '\\x00' * 0x80 elif del_info: buf = buf[:off] + '\\x00' * 0x80",
"return data[:size] def close(self): pass # ----------------------------------------------------------------- for p in self.__full_list: if p['Name']",
"sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock,",
"t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다.",
"10000 == 0: if next_b in ret: # 이미 링크가 존재하면 종료 break",
"--------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용",
"self.parent = parent self.node = node self.read_size = 0 self.fat = None #",
"-> SBD (Dec)') # 지원 완료 n = (len(data) / self.ssize) + (1",
"for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize)",
"self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore')",
"0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1",
"범위를 벗어나면 에러 return -1 if idx <= 109: return 0x4c + (idx",
"4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i for",
"+ 0x74 buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:] if",
"끝 설정하기 # 남은 링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i]",
"디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return",
"BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체",
"if (len(data) % self.ssize) else 0) t_data = data + ('\\x00' * ((n",
"num_of_bbd_blocks > 109: # bbd list 개수가 109보다 크면 xbbd를 가져와야 함 next_b",
"# fileformat - 파일 포맷 분석 정보 # 리턴값 : [[압축 엔진 ID,",
"return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를 줄인다 #",
"= struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4] + data + bbd[(no+1)*4:]",
"'<NAME>' # 제작자 info['version'] = '1.1' # 버전 info['title'] = 'OLE Library' #",
"== 0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext': (val & 0x4 ==",
"링크 수 # --------------------------------------------------------------------- def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: #",
"Prev만 존재 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no elif",
"0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40)",
"1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix)",
"start, end def read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size = pps['Size']",
"처리하기 if total_bbd_num > 109: t_num = (total_bbd_num - 109) total_xbbd_num = (t_num",
"no) no = t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no =",
"+ pps_name else: if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return 0",
"BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd",
"# 이전 링크에 필요한 블록 수 추가하여 링크를 새롭게 생성 # Root 크기",
"# SBD가 나누어 bsize 단위가 아니면 맞춘다. n = len(sbd) % self.bsize if",
"# 리턴값 : 압축 성공 여부 (True or False) # --------------------------------------------------------------------- def mkarc(self,",
"+ 4:] if start is not None: t_off = off + 0x74 buf",
"kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if",
"(원본 이미지의 SBD link가 수정 됨) # old_link : 기존 SBD link #",
"''' # 늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics",
"블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩 Overwrite",
"of XBBD # BBD 추가하기 bbd_no = [] b_data = '\\xff' * self.bsize",
"https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ]",
"없음 n = (len(data) / self.ssize) + (1 if (len(data) % self.ssize) else",
"--------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return",
"# OLE 주요 데이터 self.mm = buf self.bsize = 0 self.ssize = 0",
"o.openstream('FileHeader') d = pics.read() d = d + d o.write_stream('FileHeader', d) o.close() '''",
"self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link = get_block_link(r_no, self.bbd) # 이전",
"BBD -> BBD (Dec)') # 개발 완료 n = (len(data) / self.bsize) +",
"# 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: #",
"악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수",
"함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함",
"= total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num += x_num b_num",
"수정 모드 self.write_mode = write_mode # OLE 주요 데이터 self.mm = None self.bsize",
"종료 break ret.append(next_b) except KeyError: break return ret # --------------------------------------------------------------------- # OLE 블록",
"in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch: if",
"'\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) #",
"--------------------------------------------------------------------- # OleFile 클래스 # --------------------------------------------------------------------- class OleFile: def __init__(self, input_data, write_mode=False, verbose=False):",
"% self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막",
"필요함 def get_liner_value(self, num_list): start = None end = None if not start:",
"# 다음 블록을 가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막",
"zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일 내부의 파일 목록을",
"buf self.bsize = 0 self.ssize = 0 # 임시 변수 self.__deep = 0",
"while len(t_list): s, e = self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로",
"filehandle - 파일 핸들 # filename - 파일 이름 # filename_ex - 압축",
"덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD link 추가",
"%-4s %-4s %8s %8s' % ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB',",
"return True return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를",
"주요 데이터 self.mm = None self.bsize = None self.ssize = None self.bbd_list_array =",
"(t_size / self.ssize) + (1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) #",
"rname, len(buf) # print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else:",
"+ (1 if (t_num % ((self.bsize - 4) / 4)) else 0) x_num",
"return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의 크기를 조정한다. (내장)",
"i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if",
"('-' * 74) for p in self.pps: print ' ' + '%2d %-23s",
"last_no += 1 # END of XBBD # BBD 추가하기 bbd_no = []",
"!= 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big block, small bloc 크기 구하기",
"self.fat) data = '' if size >= 0x1000: t_list = list(list_array) while len(t_list):",
"True: pps = self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색",
"0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next']",
"node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1",
"p['Dir'] t += ' - ' if p['Start'] == 0xffffffff else '%8X '",
"Root를 늘려야 함 size = num * self.ssize # 추가해야 할 용량 add_big_num",
"0x60) print if self.bsize % 0x200 != 0 or self.ssize != 0x40: #",
"0x383F # the value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f))",
"--------------------------------------------------------------------- # init(self, plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값 : plugins_path",
"r_size = root['Size'] r_no = root['Start'] # SBD 링크를 생성한다. sbd_link = []",
"} ret['ff_attach'] = fileformat # HWP 인가? o = OleFile(filename) try: pics =",
"== 0xffffffff and next_no == 0xffffffff: # 단일 노드 # 1. 0xffffffff 노드",
"# o.test() ''' # 늘어나는건 경우의 수가 너무 많음 o = OleFile('normal.hwp', write_mode=True,",
"* self.parent.ssize data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0,",
"t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) #",
"self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos)",
"(x_num-1): x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함 else:",
"닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname] zfile.close()",
"# --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. # num : 추가할 Big",
"+= x_num b_num = (add_num / (self.bsize / 4)) + (1 if (add_num",
"+ (1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수",
"ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids =",
"self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size',",
"'Size') print ' ' + ('-' * 74) for p in self.pps: if",
"% self.bsize) else 0) t_data = data + ('\\x00' * ((n * self.bsize)",
"self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd for no in t_link: bbd",
"n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list",
"= buf self.bsize = 0 self.ssize = 0 # 임시 변수 self.__deep =",
"= no % bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array, seg*4)) t_no =",
"if not start: start = num_list.pop(0) e = start loop = False for",
"'Type', 'Prev', 'Next', 'Dir', 'SB', 'Size') print ' ' + ('-' * 74)",
"스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name, delete_storage=False, reset_stream=False): for p",
"i in range(len(num_list)): num_list.pop(0) end = e return start, end def read(self): pps",
"0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48)",
"self.bsize # 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD",
"-1 if no == -1: raise Error('PPS name is invalid.') # print no",
"os import sys import struct import types import kernel import kavutil # -------------------------------------------------------------------------",
"self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid']",
"self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD link 추가 요청한다.",
"# XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks == 1: t_data =",
"# only one charecter can be decoded ch = MsiBase64Encode(ch - 0x4800) if",
"= x loop = True continue else: while loop: if e == num_list.pop(0):",
"# 참고 : https://www.symantec.com/security_response/attacksignatures/detail.jsp?asid=25657 cve_clsids = ['\\x4B\\xF0\\xD1\\xBD\\x8B\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if",
"range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n,",
"0x4c, num_of_bbd_blocks * 4) else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block",
"+ self.mm[t_off+4:] # print repr(self.mm[t_off:t_off+4]) # t = get_bblock(self.mm, t_no, self.bsize) # print",
"[0] # 이미 분석한 노드의 경우 더이상 분석하지 않기 위해 처리 f =",
"# 미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat:",
"n in list_array: div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n]",
"= get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf, 0, 0x200) # --------------------------------------------------------------------- # SBD 링크를 줄인다",
"개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) + self.mm[0x30:] last_no += 1 #",
"늘려야 함 size = num * self.ssize # 추가해야 할 용량 add_big_num =",
"1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) * bsize if fsize",
"else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num",
"리턴값 : {파일 포맷 분석 정보} or None # --------------------------------------------------------------------- def format(self, filehandle,",
"def __init__(self, parent, node): self.parent = parent self.node = node self.read_size = 0",
"else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile return",
"if 'ff_ole' in fileformat: try: # OLE Stream 목록 추출하기 o = self.__get_handle(filename)",
"buf): # OLE 주요 데이터 self.mm = buf self.bsize = 0 self.ssize =",
"= 0 self.__full_list = [] try: self.__get_pps_path() except IndexError: pass # small block",
"= self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) #",
"0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD",
"b_data + add_data + attach_data # 특수 블록에 BBD list도 추가 special_no +=",
"# -*- coding:utf-8 -*- # Author: <NAME>(<EMAIL>) import os import sys import struct",
"'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print ' ' + ('-' *",
"* 109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b,",
"' + '%2d %-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size'])",
"1 # END of XBBD # BBD 추가하기 bbd_no = [] b_data =",
"p = {} pps = self.root[i*0x80:(i+1)*0x80] t_size = min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size",
"'ole' # 엔진 파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축",
"block 갱신 self.bbd_fat = {} for i in range(len(self.bbd) / 4): n =",
"맞춘다. n = len(sbd) % self.bsize if n: t = self.bsize - n",
"len(free_link) >= num: # 여유분이 충분히 존재함... return # 추가할 필요 없음 else:",
"* self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off] +",
"listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트 #",
"'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d' % self.ssize)",
"파일 이름 info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num'] =",
"dir_no = del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가?",
"filehandle, filename, filename_ex) # 파일 포맷을 분석한다. # 입력값 : filehandle - 파일",
"t = kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에 BBD 등록하기 for",
"# 임시 변수 self.__deep = 0 self.__full_list = [] self.parse() # OLE 파일을",
"== 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize) next_b = kavutil.get_uint32(t_buf, bsize-4)",
"결과의 BBD 링크 t_link = old_link[-1:] + free_link[:add_num] # BBD에 링크 연결하기 else:",
"uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 : 0 - 성공, 0 이외의",
"open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def parse(self): buf =",
"self.bsize = 0 self.ssize = 0 # 임시 변수 self.__deep = 0 self.__full_list",
"+ 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) + buf[t_off + 4:] if",
"크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기",
"BBD list 개수는 한개의 BBD에는 bsize / 4 개수만큼 Big Block을 담을 수",
"쓸모 없는 부분은 제거 attach_data = self.mm[size:] # 파일 뒤에 붙어 있는 잔여",
"in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link =",
"# --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 :",
"# 유효한 PPS만 처리함 return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix",
"(1 if (len(data) % self.bsize) else 0) t_data = data + ('\\x00' *",
"엔진의 주요 정보 info = dict() # 사전형 변수 선언 info['author'] = '<NAME>'",
"buf[off:off+bsize] # --------------------------------------------------------------------- # OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False):",
"<NAME>(<EMAIL>) import os import sys import struct import types import kernel import kavutil",
"if pps_prev is not None: t_off = off + 0x44 buf = buf[:t_off]",
"# filename - 파일 이름 # filename_ex - 압축 파일 내부 파일 이름",
"# Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array =",
"self.pps[root_no] if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no)",
"num_list): start = None end = None if not start: start = num_list.pop(0)",
"BBD link # add_num : 추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self,",
"개수 # XBBD를 위한 헤더 수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL',",
"o.close() ''' ''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript')",
"여분의 크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전 링크",
"(len(t_link) * self.ssize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) -",
"org_size >= 0x1000: # 기존에는 BBD 사용 if org_size >= len(data): # raise",
"압축 성공 여부 (True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos):",
"next_b, self.bsize) next_b = kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크",
"--------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = [] # 검사 대상 정보를 모두",
"self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크",
"self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id -",
"CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if",
"수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num = 0 if",
"b_num = (add_num / (self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else 0)",
"] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증",
"= OleFile(arc_name, write_mode=True) # , verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info",
"rsize } ret['ff_attach'] = fileformat # HWP 인가? o = OleFile(filename) try: pics",
"크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link",
"유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드 수 return info # ---------------------------------------------------------------------",
"block, small bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize =",
"self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb,",
"새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def",
"for no in t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else:",
"self.mm[:off] + data + self.mm[off+self.bsize:] # --------------------------------------------------------------------- # BBD를 수정한다. # bbd :",
"SBD를 수정한다. # sbd : 수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd):",
"get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in sbd_list_array: self.sbd += get_bblock(self.mm, no,",
"p in self.__full_list: if p['Name'] == name: return True else: return False #",
"xbbd_start_block if num_of_xbbd_blocks == 1: t_data = get_bblock(self.mm, next_b, self.bsize) else: t_data =",
"엔진의 주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값 : 플러그인 엔진 정보",
"len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.ssize #",
"''' # case1 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d =",
"= (s + 1) * self.read_size data += self.parent.mm[off:off + self.read_size * (e",
"<reponame>nursultanramazanov/123 # -*- coding:utf-8 -*- # Author: <NAME>(<EMAIL>) import os import sys import",
"self.bsize = bsize self.ssize = ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd",
"%d %22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체 경로",
"Next만 존재 # 1. next 노드 값을 root로 보낸다. t_no = next_no else:",
"이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) # Small block",
"' ' + '%2d %-23s %d %8X %8X %8X %8X %8d' % (self.pps.index(p),",
"파일 정보 구조체 # 리턴값 : 압축 성공 여부 (True or False) #",
"in wch: if 0x3800 <= ch <= 0x4840: if ch >= 0x4800: #",
"몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD 링크를 처음",
"동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이",
"# OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200]",
"= e break else: for i in range(len(num_list)): num_list.pop(0) end = e return",
"ow.write(no, data) if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림",
"+ self.ssize:] # --------------------------------------------------------------------- # OLE 영역의 특정 위치에 1개의 Big Block Overwrite하기",
"bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48)",
"모두 노트가 존재함 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no",
"no = node while True: pps = self.pps[no] if pps['Next'] == 0xffffffff: #",
"old_link : 기존 SBD link # add_num : 추가 SBD link 개수 #",
"= buf[:off] + '\\x00' * 0x80 elif del_info: buf = buf[:off] + '\\x00'",
"Block을 계산한다. BBD List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num = b_num",
"개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size = root['Size'] r_no",
"+ 1) * self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를",
"% self.bsize) else 0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big",
", verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname =",
"= self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n / div_n] + 1) * self.parent.bsize",
"(t_idx % ((bsize / 4) - 1)) else 0) off = (t_idx %",
"for i in range(seg): if next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf,",
"scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next']",
"kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array",
"# var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # -------------------------------------------------------------------------",
"sbd = sbd[:no*4] + data + sbd[(no+1)*4:] no = t_link[-1] sbd = sbd[:no",
"얻기 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) if self.verbose: print kavutil.vprint('Small Blocks') print self.small_block return",
"buf = get_bblock(self.mm, n, self.bsize) off = ((node % 4) * 0x80) if",
"이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd =",
"OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile return zfile # ---------------------------------------------------------------------",
"wch = [] och = [] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name,",
"if len(data) == self.bsize: self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] return True",
"--------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd # 체크 !!! bbd_list_array, _, _,",
"개수 체크하기 last_no = (size / self.bsize) - 2 # 실제 마지막 Big",
"print ret_str.decode('UTF-16LE', 'replace') return ret_str # --------------------------------------------------------------------- # OLE 내부 링크 구하기 #",
"if os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp = open(input_data, 'rb') buf",
"t_link: sbd = sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다.",
"except IndexError: if (x & 0x90900000) == 0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return",
"self.bbd = '' for i in t_link: self.bbd += struct.pack('<L', i) # self.mm에",
"여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data) / self.bsize # 몇개의 블록이",
"ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f))",
"# 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크",
"self.small_block = None self.root_list_array = None self.exploit = [] # 취약점 존재 여부",
"# 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) / self.bsize)",
"* self.bsize] + self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block",
"압축 파일 핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile",
"# 추가해야 할 BBD list 개수는 한개의 BBD에는 bsize / 4 개수만큼 Big",
"'\\xff' * self.bsize * b_num for i in range(b_num): bbd_no.append(last_no) last_no += 1",
"file_infos - 압축 대상 파일 정보 구조체 # 리턴값 : 압축 성공 여부",
"t_data, t_link): for i, n in enumerate(t_link): off = (n + 1) *",
"파일을 분석 def close(self): if self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # ---------------------------------------------------------------------",
"= bbd # 체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i",
"Small block 링크 # num_link : 필요로 하는 전체 링크 수 # ---------------------------------------------------------------------",
"+ '\\x00' * 0x80 + buf[off+0x80:] if size is not None: t_off =",
"no in special_no: seg = no / bb_num off = no % bb_num",
"self.bsize) + (1 if (size % self.bsize) else 0) # 추가해야 할 Big",
"_ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i",
"BBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data) /",
"pps_name = '' name = prefix + pps_name else: if self.pps[node]['Valid'] is False:",
"special_no: seg = no / bb_num off = no % bb_num # print",
"추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block 추가 요청 # t_link",
"0xffffffff: # 양쪽 모두 노트가 존재함 # 1. prev 노드 값을 root로 보낸다.",
"self.isfile: self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # ---------------------------------------------------------------------",
"och = [] for i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2))",
"{} for i in range(len(self.bbd) / 4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] =",
"= get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4)",
"# 입력값 : filename - 파일 이름 # 리턴값 : 압축 파일 핸들",
"접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile = True self.fname = input_data",
"파일 내부의 파일 목록을 얻는다. # 입력값 : filename - 파일 이름 #",
"'[-] filename :', rname, len(buf) # print '[-] rname :', o.write_stream(a_name, buf) #",
"= get_bblock(self.mm, n, self.bsize) off = ((node % 4) * 0x80) if del_info",
"if next_b != 0xfffffffe: ret.append(next_b) while True: try: next_b = fat[next_b] if next_b",
"%-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev',",
"플러그인 엔진의 주요 정보 info = dict() # 사전형 변수 선언 info['author'] =",
"--------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의 경우 더이상 분석하지",
"self.bbd_fat[i] = n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩",
"가리켜야 함으로 1를 더함 else: x_data += '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을",
"# BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) #",
"= '' for no in root_list_array: self.root += get_bblock(self.mm, no, self.bsize) if self.verbose:",
"= self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return",
"% ('No', 'Name', 'Type', 'Prev', 'Next', ' Dir', 'SB', 'Size') print ' '",
"--------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...) #",
"hex(t) # BBD List에 BBD 등록하기 for i, no in enumerate(bbd_no): off =",
"struct.pack('<L', last_no) off = (next_b + 1) * self.bsize # t_data의 위치 self.mm",
"구함 self.__modify_big_block_link(t_link, add_big_num) # 이전 링크에 필요한 블록 수 추가하여 링크를 새롭게 생성",
"/ 4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD",
"__set_bblock(self, no, data): off = (no + 1) * self.bsize if len(data) ==",
"= self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Small block 영역에 ssize",
"1개의 Big Block Overwrite하기 (내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off =",
"get_bbd_list_index_to_offset(buf, idx): num_of_bbd_blocks = kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks =",
"파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize * num # 실제 필요한",
": plugins_path - 플러그인 엔진의 위치 # verbose - 디버그 모드 (True or",
"off + 0x74 buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off + 4:]",
"p['Name'] == name: no = p['Node'] break else: no = -1 if no",
"처리 f = [] if len(self.pps) == 0: # 분석된 PPS가 없으면 종료",
"kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e)",
"'%4d ' % p['Prev'] t += ' - ' if p['Next'] == 0xffffffff",
"t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) #",
"Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array",
"링크 # num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self,",
"o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except: pass return [] # --------------------------------------------------------------------- # unarc(self,",
": 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0]",
"return 0 pps_name = self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' + pps_name",
"> num_link: # BBD를 배열로 바꾸기 t_link = [] for i in range(len(self.bbd)",
"* self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_num = len(t_data)",
"n # 추가해야 할 블록 수 add_data = ('\\x00' * self.bsize * add_num)",
"[] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i * 4)) t =",
"node or pps['Next'] == node or pps['Dir'] == node: return i def __get_max_node(self,",
"실제 마지막 Big Block 번호 n = (len(self.bbd) / 4 - 1) -",
"total_bbd_num) + self.mm[0x30:] last_no += 1 # XBBD 처리하기 if total_bbd_num > 109:",
"old_link : 기존 BBD link # add_num : 추가 BBD link 개수 #",
"인가? o = OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read() if d[:0x11]",
"self.node = node self.read_size = 0 self.fat = None # print self.parent.verbose #",
"특정 데이터를 big block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_big_block(self,",
"self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # SBD",
"# PPS 읽기 self.pps = [] for i in range(len(self.root) / 0x80): p",
"= 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d'",
"self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data):",
"엔진을 종료한다. # 리턴값 : 0 - 성공, 0 이외의 값 - 실패",
"# Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- #",
"b_num for i in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종 조합 self.mm",
"압축 가능 엔진 ID # arc_name - 최종적으로 압축될 압축 파일 이름 #",
"and next_no == 0xffffffff: # Prev만 존재 # 1. prev 노드 값을 root로",
"OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1 and delete_storage: # 유효한 스토리지?",
"0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in sbd_list_array: self.sbd",
"t_off = off + 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off",
"i in t_link: self.bbd += struct.pack('<L', i) # self.mm에 BBD 적용하기 t, num_of_bbd_blocks,",
"pps self.bsize = bsize self.ssize = ssize self.bbd = bbd self.bbd_fat = bbd_fat",
"압축 대상 파일 정보 구조체 # 리턴값 : 압축 성공 여부 (True or",
"# --------------------------------------------------------------------- # SBD 링크를 줄인다 # org_link_list : 기존 Small block 링크",
"= (fsize / bsize) * bsize if fsize > rsize: fileformat = {",
"pics = o.openstream('FileHeader') d = pics.read() d = d + d o.write_stream('FileHeader', d)",
"+= ' - ' if p['Start'] == 0xffffffff else '%8X ' % p['Start']",
"format(self, filehandle, filename, filename_ex): ret = {} mm = filehandle # OLE 헤더와",
"0x40), 0x40) if t_size != 0: # 출력시 이름이 깨질 가능성이 큼 if",
"i*4)) # 사용하지 않는 SBD 링크를 찾는다. free_link = [i for i, no",
"= pics.read() o.close() ''' # XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True,",
"개수는 한개의 BBD에는 bsize / 4 개수만큼 Big Block을 담을 수 있음 b_num",
"%d %8X %8X %8X %8X %8d' % (self.pps.index(p), p['Name'], p['Type'], p['Prev'], p['Next'], p['Dir'],",
"= self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size = self.bsize fat =",
"PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정 데이터를 big",
"e break else: for i in range(len(num_list)): num_list.pop(0) end = e return start,",
"class OleFile: def __init__(self, input_data, write_mode=False, verbose=False): self.verbose = verbose # 디버깅용 self.isfile",
"0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat) # print sbd_list_array",
"# 입력값 : arc_engine_id - 압축 가능 엔진 ID # arc_name - 최종적으로",
"repr(t) # t = kavutil.get_uint32(t, off*4) # print hex(t) # BBD List에 BBD",
"- 0x383F # the value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch &",
"t_num = 0 if (len(t_link) * self.ssize) < len(t_data): # 블록 추가해야 하나?",
"정보 info = dict() # 사전형 변수 선언 info['author'] = '<NAME>' # 제작자",
"리턴값 : 압축 성공 여부 (True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id,",
"저장 p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] !=",
"링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize)",
"class OleWriteStream: def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array,",
"PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : SBD ->",
"len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) #",
"data is invalid.') # 수정 모드 self.write_mode = write_mode # OLE 주요 데이터",
"if p['Dir'] == 0xffffffff else '%4d ' % p['Dir'] t += ' -",
"# the value contains two characters ch -= 0x3800 och.append(MsiBase64Encode(ch & 0x3f)) ch",
"# --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의 경우 더이상",
"elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- #",
"정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인 엔진의 주요 정보 info = dict()",
"rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except",
"플러그인 엔진 초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. #",
"o.close() ''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage') print get_block_link(o.pps[6]['Start'], o.sbd) #",
"!= 0: # 출력시 이름이 깨질 가능성이 큼 if ord(pps[0]) & 0xF0 ==",
"kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ =",
"last_no) off = (next_b + 1) * self.bsize # t_data의 위치 self.mm =",
"data = None if arc_engine_id == 'arc_ole': o = self.__get_handle(arc_name) fp = o.openstream(fname_in_arc)",
"# --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks =",
"input_data self.fp = open(input_data, 'rb') buf = self.fp.read() else: buf = input_data else:",
"이전 링크가 없다면... ret_link = free_link[:add_num] # 최종 결과의 BBD 링크 t_link =",
"pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev']",
"Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩",
"OLE 주요 데이터 self.mm = buf self.bsize = 0 self.ssize = 0 #",
"# 링크 삭제 if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- #",
"__name__ == '__main__': # import zlib # o = OleFile('normal.hwp', write_mode=True, verbose=True) o",
"-15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') # var -> foo d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript',",
"# o = OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir()",
"kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass # ----------------------------------------------------------------- for p",
"arc_engine_id, arc_name, file_infos) # 입력값 : arc_engine_id - 압축 가능 엔진 ID #",
"mm = filehandle # OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] =",
"압축 파일 내부의 파일 목록을 얻는다. # 입력값 : filename - 파일 이름",
"바꾸기 self.bbd = '' for i in t_link: self.bbd += struct.pack('<L', i) #",
"except: pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값",
"+ free_link[:add_num] # BBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link =",
"no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block,",
"정보를 모두 가짐 # 미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if",
"파일로 읽기 off = (s + 1) * self.read_size data += self.parent.mm[off:off +",
"o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o') #",
"'' for i in range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n,",
"add_num : 추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if",
"여유분이 충분히 존재함... return # 추가할 필요 없음 else: # 여유분이 부족함. 따라서",
"(size / self.bsize) + (1 if (size % self.bsize) else 0) # 추가해야",
"= self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Big block 영역에 bsize",
"이전에 열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose)",
"PPS 인덱스 # size : 설정 크기 # start : 시작 링크 #",
"따라서 Root를 늘려야 함 size = num * self.ssize # 추가해야 할 용량",
"def __add_small_block_num(self, num): root = self.pps[0] r_size = root['Size'] r_no = root['Start'] #",
"None: t_off = off + 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) +",
"+ 4:] self.__set_bblock(n, buf) if self.verbose: print buf = get_bblock(self.mm, n, self.bsize) kavutil.HexDump().Buffer(buf,",
"# --------------------------------------------------------------------- # BBD link 추가 요청한다. (원본 이미지의 BBD link가 수정 됨)",
"next_b, self.bsize) else: t_data = '' for i in range(num_of_xbbd_blocks-1): t_data = get_bblock(self.mm,",
"성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 : 0",
"뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전 링크 수집하기 t_link =",
"pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r',",
"org_size >= len(data): # raise error('Not Support : BBD -> BBD (Dec)') #",
"t = ow.write(no, '\\x00' * size) # 모든 데이터를 0으로 Wipe t =",
"bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm = mm",
"self.bsize self.mm = self.mm[:off] + t_data[i * self.bsize:(i + 1) * self.bsize] +",
"e = x loop = True continue else: while loop: if e ==",
"return [] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다.",
"len(self.bbd_list_array)/4 < num_of_bbd_blocks: return False self.bbd = '' for i in range(num_of_bbd_blocks): no",
"# 수정된 data를 쓰기 위해 준비한다 if len(data) >= 0x1000: # BBD를 사용한다.",
"num_list.pop(0): break end = e break else: for i in range(len(num_list)): num_list.pop(0) end",
"elif p['Type'] == 1 and storages: ret.append(p['Name']) else: pass return ret # ---------------------------------------------------------------------",
"= kavutil.get_uint32(t_data, self.bsize-4) # 기존 XBBD 마지막에 새로운 XBBD 링크 추가 t_data =",
"분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat: try: #",
"종료 return False if self.pps[0]['Dir'] != 0xffffffff and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir'])",
"off = (no + 1) * self.bsize if len(data) == self.bsize: self.mm =",
"bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd = '' for i in range(len(bbd_list_array)/4):",
"n, self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) #",
"1) * self.parent.bsize off += (n % div_n) * self.parent.ssize data += self.parent.mm[off:off",
"call') # --------------------------------------------------------------------- # Big Block을 주어진 개수만큼 추가한다. # num : 추가할",
"List에 BBD 등록하기 for i, no in enumerate(bbd_no): off = get_bbd_list_index_to_offset(self.mm, old_num_bbd +",
"# ------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' # -------------------------------------------------------------------------",
"self.mm[:off] + t_data[i * self.bsize:(i + 1) * self.bsize] + self.mm[off + self.bsize:]",
"None: t_off = off + 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev) +",
"self.fat = None # print self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO",
"== num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # BBD 링크를",
"'%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d",
"= p['Node'] break else: no = -1 if no == -1: raise Error('PPS",
"free_link = [i for i, no in enumerate(bbd_link) if (no == 0xffffffff and",
"# --------------------------------------------------------------------- def listvirus(self): # 진단 가능한 악성코드 리스트 vlist = list() #",
"return file_scan_list except: pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc)",
"0x1000: # 기존에는 BBD 사용 # raise error('Not Support : BBD -> SBD')",
"초기화 성공 # --------------------------------------------------------------------- # uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 :",
"x: e = x loop = True continue else: while loop: if e",
"self.get_liner_value(t_list) # 연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off = (s +",
"t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Big block 영역에",
"t_link = [] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4))",
"bsize 단위가 아니면 맞춘다. n = len(sbd) % self.bsize if n: t =",
"검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고 : https://securelist.com/the-curious-case-of-a-cve-2012-0158-exploit/37158/ # 참고 :",
"# print repr(t) # t = kavutil.get_uint32(t, off*4) # print hex(t) # BBD",
"Root 수정, Next 수정 o.close() ''' o = OleFile('normal.hwp', verbose=True) pics = o.openstream('PrvImage')",
": BBD -> SBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n",
"self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값 :",
"not None: t_off = off + 0x44 buf = buf[:t_off] + struct.pack('<L', pps_prev)",
"(next_b + 1) * bsize + (off * 4) # --------------------------------------------------------------------- # OLE",
"= None self.root = None self.pps = None self.small_block = None self.root_list_array =",
"정보 처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\",
"# 이전 Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전 Small",
"t_link = self.__modify_big_block_link(t_link, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link)",
"필요한 블록 수 추가하여 링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size",
"1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if self.verbose: kavutil.vprint('Header')",
"_, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array,",
"t_data = get_bblock(buf, next_b, bsize) bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return",
"node self.read_size = 0 self.fat = None # print self.parent.verbose # 연속된 숫자",
"헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된",
"if t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid'] and target_pps['Type'] == 1",
"= '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._' if x > 63: return None return ord(ct[x]) def DecodeStreamName(name): wch",
"영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록 수정",
"num # 실제 필요한 데이터 블록 self.mm += attach_data else: special_no = []",
"self.bsize) t_num = (t_size / self.bsize) + (1 if (t_size % self.bsize) else",
"mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다.",
"+ data + self.mm[0x4C:] # XBBD 블록 연결 next_b = xbbd_start_block if num_of_xbbd_blocks",
"= self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id, arc_name, file_infos) # 입력값",
"BBD (Dec)') # 개발 완료 n = (len(data) / self.bsize) + (1 if",
"for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:]",
"없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no",
"t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm # --------------------------------------------------------------------- # 특정",
"bbd[i * self.bsize:(i + 1) * self.bsize] off = (no + 1) *",
"num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD list의 index를 Offset으로 리턴한다. # ---------------------------------------------------------------------",
"except Error: pass o.close() return ret # --------------------------------------------------------------------- # __get_handle(self, filename) # 압축",
"org_link_list, num_link): if len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link = []",
"return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename, filename_ex) # 파일 포맷을 분석한다.",
"kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False",
"4)) else 0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수",
"헤더 수정 if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm =",
"self.ssize) else 0) t_data = data + ('\\x00' * ((n * self.ssize) -",
"p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = '' p['Type'] = ord(pps[0x42]) p['Prev'] =",
"self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- # PPS 헤더에 존재하는 특정 스트림의",
"def __get_pps_path(self, node=0, prefix=''): if node == 0: pps_name = '' name =",
"' if p['Dir'] == 0xffffffff else '%4d ' % p['Dir'] t += '",
"만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # raise",
"4 - 1) - last_no if n >= num: # 잔여 개수가 추가하려는",
"sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array = get_block_link(sbd_no, self.bbd) sbd_list_array = get_block_link(sbd_no, self.bbd_fat)",
"0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을",
"[[압축 엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list",
"num_link: # BBD를 배열로 바꾸기 t_link = [] for i in range(len(self.bbd) /",
"error('Not Support : BBD -> BBD (Dec)') # 개발 완료 n = (len(data)",
"and next_no == 0xffffffff: # 단일 노드 # 1. 0xffffffff 노드 값을 root로",
"sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm = mm self.pps =",
"열린 핸들이 존재하는가? zfile = self.handle.get(filename, None) else: zfile = OleFile(filename, verbose=self.verbose) #",
"이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_big_block_link(t_link, t_num) # Big block",
"# --------------------------------------------------------------------- # 스트림이 존재하는가? # --------------------------------------------------------------------- def exists(self, name): for p in",
"# print hex(off) self.mm = (self.mm[:off] + struct.pack('<L', no) + self.mm[off+4:]) # ---------------------------------------------------------------------",
"적용하기 t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for",
"free_link[:add_num] # 최종 결과의 SBD 링크 t_link = old_link[-1:] + free_link[:add_num] # SBD에",
"리턴값 : [[압축 엔진 ID, 압축된 파일 이름]] # --------------------------------------------------------------------- def arclist(self, filename,",
"pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data): # 기존 PPS 정보를 얻는다",
"- 압축 대상 파일 정보 구조체 # 리턴값 : 압축 성공 여부 (True",
"t_size = len(t_data) - (len(t_link) * self.ssize) t_num = (t_size / self.ssize) +",
"--------------------------------------------------------------------- # BBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 #",
"해당 정보를 가진 root를 찾기 for i, pps in enumerate(self.pps): if pps['Prev'] ==",
"self.bsize] off = (no + 1) * self.bsize self.mm = self.mm[:off] + data",
"= kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block # --------------------------------------------------------------------- # OLE의 BBD",
"리스트 vlist = list() # 리스트형 변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름",
"4:] if pps_next is not None: t_off = off + 0x48 buf =",
"self.sbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.ssize) <",
"'%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm,",
"[i for i, no in enumerate(bbd_link) if (no == 0xffffffff and i <",
"= 0 self.fat = None # print self.parent.verbose # 연속된 숫자 값을 리턴한다.",
"# --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz._'",
"...) # 리턴값 : 플러그인 엔진 정보 # --------------------------------------------------------------------- def getinfo(self): # 플러그인",
"4) - 1)) + (1 if (t_idx % ((bsize / 4) - 1))",
"삭제 노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return",
"range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i != (x_num-1): x_data",
"1 << kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' %",
"아니면 맞춘다. n = len(sbd) % self.bsize if n: t = self.bsize -",
"+ sbd[(no + 1) * 4:] # SBD가 나누어 bsize 단위가 아니면 맞춘다.",
"= o.openstream('FileHeader') d = pics.read() d = d + d o.write_stream('FileHeader', d) o.close()",
"raise error('Not Support : SBD -> SBD (Inc)') # 작업 완료 n =",
"ow.write(no, '\\x00' * size) # 모든 데이터를 0으로 Wipe t = ow.delete(no) if",
"d = pics.read() d = d + d o.write_stream('FileHeader', d) o.close() ''' '''",
"모두 가짐 # 미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole'",
"scaned_pps_node.append(self.pps[x]['Prev']) if self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else:",
"== 0xffffffff and i < size / self.bsize)] if len(free_link) >= num: #",
"# 최종 결과의 BBD 링크 t_link = free_link[:add_num] # BBD에 링크 연결하기 for",
"streams=True, storages=False): ret = [] for p in self.__full_list: if p['Type'] == 2",
"= get_bblock(self.mm, next_b, self.bsize) else: t_data = '' for i in range(num_of_xbbd_blocks-1): t_data",
"i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat",
"= self.bbd else: # read_size = self.ssize fat = self.sbd # org_list_array =",
"in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in enumerate(bbd_list_array):",
"self.isfile = False # 파일로 접근 중인가? if isinstance(input_data, types.StringType): if os.path.exists(input_data): self.isfile",
"= [i for i, no in enumerate(bbd_link) if (no == 0xffffffff)] if old_link:",
"i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = []",
"write_mode # OLE 주요 데이터 self.mm = None self.bsize = None self.ssize =",
"self.verbose = verbose # 디버깅용 self.isfile = False # 파일로 접근 중인가? if",
"for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link:",
"(size % self.bsize) else 0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) #",
"kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks', '%d' % root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd",
"b_num # 전체 BBD list 개수 self.mm = self.mm[:0x2c] + struct.pack('<L', total_bbd_num) +",
"[] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd, i*4)) # 사용하지 않는 SBD",
"self.sbd # org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해 준비한다",
"no = t_link[i+1] data = struct.pack('<L', no) no = t_link[i] bbd = bbd[:no*4]",
"i def __get_max_node(self, node): # 특정 노드의 Max 값을 가진 node를 찾기 no",
"storages=False): ret = [] for p in self.__full_list: if p['Type'] == 2 and",
"node : PPS 인덱스 # size : 설정 크기 # start : 시작",
"self.parent.sbd_fat list_array = get_block_link(sb, self.fat) data = '' if size >= 0x1000: t_list",
"엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다.",
"--------------------------------------------------------------------- def openstream(self, name): # ----------------------------------------------------------------- # 스트림 전용 클래스 # ----------------------------------------------------------------- class",
"0x90900000: # CVE-2003-0820 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0820') return False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return",
"OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat: try: # OLE Stream 목록",
"# --------------------------------------------------------------------- def exists(self, name): for p in self.__full_list: if p['Name'] == name:",
"존재함 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no # 2.",
"self.__set_pps_header(no, size=len(data)) else: # raise error('Not Support : SBD -> SBD (Inc)') #",
"!= 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 #",
"if no == -1: raise Error('PPS name is invalid.') return Stream(self, no) #",
"핸들을 닫는다. # --------------------------------------------------------------------- def arcclose(self): for fname in self.handle.keys(): zfile = self.handle[fname]",
"if org_size >= len(data): # raise error('Not Support : SBD -> SBD (Dec)')",
"분석하지 않기 위해 처리 f = [] if len(self.pps) == 0: # 분석된",
"담을 공간 'Attached_Pos': rsize, 'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat #",
"영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정, start 블록 수정",
"입력값 : filename - 파일 이름 # fileformat - 파일 포맷 분석 정보",
"# --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b =",
"t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) else: # 기존에는 SBD 사용 #",
"때문에... old_b_num = b_num while True: if old_num_bbd + b_num > 109: t_num",
"self.bsize # 파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거",
"(1 if (add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block =",
"+= ' - ' if p['Next'] == 0xffffffff else '%4d ' % p['Next']",
"self.ssize # 추가해야 할 용량 add_big_num = (size / self.bsize) + (1 if",
"scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS",
"None) else: zfile = OleFile(filename, verbose=self.verbose) # ole 파일 열기 self.handle[filename] = zfile",
"no = t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no +",
"'OLE' # OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize =",
"# ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진 오류 메시지를 정의 #",
"필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data,",
"root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = '' for no in",
"클래스 # ----------------------------------------------------------------- class Stream: def __init__(self, parent, node): self.parent = parent self.node",
"파일 크기 self.mm = self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data =",
"arc_name, file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name, write_mode=True) # , verbose=True)",
"= fp.read() # print '[-] filename :', rname, len(buf) # print '[-] rname",
"기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size",
"= kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize =",
": filehandle - 파일 핸들 # filename - 파일 이름 # filename_ex -",
"수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Big block",
"# 여분의 크기를 data 뒤쪽에 추가하기 # t_link = get_block_link(org_sb, self.sbd) # 이전",
"0xffffffff else '%4d ' % p['Prev'] t += ' - ' if p['Next']",
"0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD",
"buf = fp.read() # print '[-] filename :', rname, len(buf) # print '[-]",
"4) - 1)) next_b = xbbd_start_block for i in range(seg): if next_b ==",
"0 이외의 값 - 실패 # --------------------------------------------------------------------- def init(self, plugins_path, verbose=False): # 플러그인",
"# Author: <NAME>(<EMAIL>) import os import sys import struct import types import kernel",
"# --------------------------------------------------------------------- def write_stream(self, name, data): for p in self.__full_list: if p['Name'] ==",
"zfile.writestr(a_name, buf) else: # 삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass",
"XBBD 생성하기 for i in range(x_num): x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1)",
"사용하지 않는 SBD 링크를 찾는다. free_link = [i for i, no in enumerate(sbd_link)",
"buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off + 4:] self.__set_bblock(n, buf) if",
"# 연속된 숫자 값을 리턴한다. # TODO : 임시로 작성한거라 최적화 필요함 def",
"b_num > 109: t_num = (old_num_bbd + b_num - 109) total_xbbd_num = (t_num",
"늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건",
"# 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한 개수로 링크 줄이기",
"= bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe: ret.append(next_b) while True: try:",
"개수가 109보다 크면 xbbd를 가져와야 함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks):",
"모두 삭제한다. # t_link = get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link =",
"self.mm += '\\x00' * self.bsize * num # 실제 필요한 데이터 블록 self.mm",
"self.__deep = 0 self.__full_list = [] self.parse() # OLE 파일을 분석 def close(self):",
"free_link[:add_num] # BBD에 링크 연결하기 else: # 이전 링크가 없다면... ret_link = free_link[:add_num]",
"+ 1) * self.bsize # t_data의 위치 self.mm = self.mm[:off] + t_data +",
": 추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_big_block_num(self, num): size = (len(self.mm)",
"in och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return ret_str #",
"in enumerate(t_link): off = (n + 1) * self.bsize self.mm = self.mm[:off] +",
"p['Name'].encode(sys.stdout.encoding, 'replace') print ' ' + '%2d %-35s %d %22s %8d' % (self.pps.index(p),",
"pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self, no, data): # 기존 PPS",
"raise Error('Input data is invalid.') # 수정 모드 self.write_mode = write_mode # OLE",
"for ch in och: ret_str += struct.pack('<H', ch) # print ret_str.decode('UTF-16LE', 'replace') return",
"취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev']",
"미리 분석된 파일 포맷중에 OLE 파일 포맷이 있는가? if 'ff_ole' in fileformat: try:",
"* (e - s + 1)] else: for n in list_array: div_n =",
": BBD -> BBD (Dec)') # 개발 완료 n = (len(data) / self.bsize)",
"n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize) else 0)",
": 수정된 BBD 이미지 # --------------------------------------------------------------------- def __modify_bbd(self, bbd): self.bbd = bbd #",
"[] # 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) #",
"root를 찾기 for i, pps in enumerate(self.pps): if pps['Prev'] == node or pps['Next']",
"배열로 바꾸기 t_link = [] for i in range(len(self.bbd) / 4): t_link.append(kavutil.get_uint32(self.bbd, i",
"# return ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array,",
"4): n = kavutil.get_uint32(self.bbd, i*4) self.bbd_fat[i] = n if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print",
"63: return None return ord(ct[x]) def DecodeStreamName(name): wch = [] och = []",
"= self.pps[no]['Start'] org_size = self.pps[no]['Size'] ''' if org_size >= 0x1000: # read_size =",
"'\\x00' * size) # 모든 데이터를 0으로 Wipe t = ow.delete(no) if t:",
"# 악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) # 진단/치료 가능한 악성코드",
"self.mm[:off] + data + self.mm[off+self.bsize:] return True return False # --------------------------------------------------------------------- # PPS",
"t_data = data + ('\\x00' * ((n * self.bsize) - len(data))) # 여분의",
"if num_of_xbbd_blocks == 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] +",
"메시지를 정의 # ------------------------------------------------------------------------- class Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더",
"= 1 << kavutil.get_uint16(buf, 0x1e) if idx >= num_of_bbd_blocks: # 범위를 벗어나면 에러",
"!= 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else: f.append(self.pps[x]['Prev']) scaned_pps_node.append(self.pps[x]['Prev']) if",
"/ 4 - 1) - last_no if n >= num: # 잔여 개수가",
"in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) if self.verbose:",
"# fname_in_arc - 압축 해제할 파일 이름 # 리턴값 : 압축 해제된 내용",
"if (len(data) % self.bsize) else 0) t_data = data + ('\\x00' * ((n",
"%-8s %-8s %-8s %-8s %-8s' % ('No', 'Name', 'Type', 'Prev', 'Next', 'Dir', 'SB',",
"if pps['Prev'] == del_no: self.__set_pps_header(root_no, pps_prev=t_no) elif pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else:",
"0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root = '' for no",
"= kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도 추가될",
"t_data = data + ('\\x00' * ((n * self.ssize) - len(data))) # 여분의",
"self.bsize) else 0) # 추가해야 할 Big Block 개수 self.__add_big_block_num(add_big_num) # Big Block",
"p in self.pps: if p['Valid'] is False: # 유효한 Tree가 아니면 다음 continue",
"== 0xffffffff and i < r_size / self.ssize)] if len(free_link) >= num: #",
"OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' # OLE 뒤에",
"0xffffffff else '%4d ' % p['Dir'] t += ' - ' if p['Start']",
"8] + 1) * self.bsize off += (n % 8) * self.ssize self.mm",
"= None return data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일 핸들을 닫는다.",
"buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return False # --------------------------------------------------------------------- #",
"# 0x3800 - 0x383F # the value contains two characters ch -= 0x3800",
"and off == 0x180: buf = buf[:off] + '\\x00' * 0x80 elif del_info:",
"root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock, self.bbd_fat) self.root_list_array = root_list_array self.root =",
"self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if",
"# prev_no == 0xffffffff and next_no == 0xffffffff: # 단일 노드 # 1.",
"num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의",
"if self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기",
"root['Size'] r_no = root['Start'] # SBD 링크를 생성한다. sbd_link = [] for i",
"i, n in enumerate(t_link): off = (n + 1) * self.bsize self.mm =",
"BBD 링크를 줄인다 # org_link_list : 기존 Small block 링크 # num_link :",
"1. prev 노드 값을 root로 보낸다. t_no = prev_no # 2. prev 노드",
"link # add_num : 추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link,",
"= struct.pack('<L', no) no = t_link[i] sbd = sbd[:no*4] + data + sbd[(no+1)*4:]",
"= prev_no elif prev_no == 0xffffffff and next_no != 0xffffffff: # Next만 존재",
"print self.parent.verbose # 연속된 숫자 값을 리턴한다. # TODO : 임시로 작성한거라 최적화",
"uninit(self): # 플러그인 엔진 종료 return 0 # 플러그인 엔진 종료 성공 #",
"self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no] if target_pps['Valid'] and target_pps['Type'] == 2:",
"수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는 모두 삭제한다.",
"Block 번호 n = (len(self.bbd) / 4 - 1) - last_no if n",
"= '' # b_data = '' # add_data = '' add_num = num",
"# 원래 이미지에 BBD 덮어쓰기 self.__modify_bbd(bbd) return ret_link # 연결된 링크 # ---------------------------------------------------------------------",
"self.root_list_array = root_list_array self.root = '' for no in root_list_array: self.root += get_bblock(self.mm,",
"break else: no = -1 if no == -1: raise Error('PPS name(%s) is",
"# 상당히 많은 데이터가 출력되어 주석 처리 if self.verbose: print if num_of_bbd_blocks <",
"prefix + pps_name else: if self.pps[node]['Valid'] is False: # 유효한 PPS만 처리함 return",
"(add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44)",
"--------------------------------------------------------------------- def get_bblock(buf, no, bsize): off = (no+1) * bsize return buf[off:off+bsize] #",
"types.StringType): if os.path.exists(input_data): self.isfile = True self.fname = input_data self.fp = open(input_data, 'rb')",
"struct.pack('<L', no) + self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. #",
"= [] for i in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t",
"% root_startblock) print kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c)",
"ord(ct[x]) def DecodeStreamName(name): wch = [] och = [] for i in range(len(name)",
"small bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1",
"* self.ssize) t_num = (t_size / self.ssize) + (1 if (t_size % self.ssize)",
"이름]] # --------------------------------------------------------------------- def arclist(self, filename, fileformat): file_scan_list = [] # 검사 대상",
"뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:] # 파일 뒤에 붙어 있는",
"root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no != 0xffffffff",
"t = get_bblock(self.mm, t_no, self.bsize) # print repr(t) # t = kavutil.get_uint32(t, off*4)",
"만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS 크기 수정 self.__set_pps_header(no, size=len(data)) return self.mm #",
"# 전체 BBD 링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를",
"ow.delete(no) # 링크 삭제 if t: self.init(t) # 새롭게 OLE 재로딩 # ---------------------------------------------------------------------",
"self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link: return org_link_list else: raise Error('Invalid",
"else 0) off = (t_idx % ((bsize / 4) - 1)) next_b =",
"kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i in",
"kavutil.HexDump().Buffer(self.root, 0, 0x80) # sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm,",
"[] # 검사 대상 정보를 모두 가짐 # 미리 분석된 파일 포맷중에 OLE",
"4): bbd_list_array.append(kavutil.get_uint32(t, i * 4)) for i, n in enumerate(bbd_list_array): self.__set_bblock(n, self.bbd[i*self.bsize:(i+1)*self.bsize]) return",
"0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data =",
"버퍼로 바꾸기 self.sbd = '' for i in t_link: self.sbd += struct.pack('<L', i)",
"= (len(self.bbd) / 4 - 1) - last_no if n >= num: #",
"List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num = b_num while True: if",
"get_bblock(buf, no, bsize): off = (no+1) * bsize return buf[off:off+bsize] # --------------------------------------------------------------------- #",
"# OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize, ssize,",
"너무 많음 o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('FileHeader') d = pics.read()",
"BBD Blocks', '%d' % num_of_bbd_blocks) kavutil.vprint(None, 'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num",
"+ '/' + pps_name # print (\"%02d : %d %s\") % (node, self.deep,",
"if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 #",
"OLE의 BBD 리스트를 얻는다. # --------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] #",
"블록 수 add_data = ('\\x00' * self.bsize * add_num) # 추가해야 할 BBD",
"+= get_bblock(self.mm, no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i",
"och.append(MsiBase64Encode(ch & 0x3f)) ch = MsiBase64Encode(((ch >> 6) & 0x3f)) och.append(ch) ret_str =",
"진단/치료 가능한 악성코드의 리스트를 알려준다. # 리턴값 : 악성코드 리스트 # --------------------------------------------------------------------- def",
"+ 0x48 buf = buf[:t_off] + struct.pack('<L', pps_next) + buf[t_off + 4:] if",
"수 추가하여 링크를 새롭게 생성 # Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num",
"(next_b + 1) * self.bsize # t_data의 위치 self.mm = self.mm[:off] + t_data",
"self.bsize # t_data의 위치 self.mm = self.mm[:off] + t_data + self.mm[off + self.bsize:]",
"# 메시지 출력 함수 # ------------------------------------------------------------------------- __version__ = '1.0' # ------------------------------------------------------------------------- # 엔진",
"num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e) if idx >=",
"0x48) # 추가적인 Big Block을 계산한다. BBD List와 XBBD 블록도 추가될 수 있기",
"--------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id == 'arc_ole': o = OleFile(arc_name,",
"- len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) #",
"ret.append(next_b) while True: try: next_b = fat[next_b] if next_b == 0xfffffffe: break if",
"= sbd[:no*4] + '\\xff\\xff\\xff\\xff' + sbd[(no+1)*4:] self.__modify_sbd(sbd) else: # SBD를 사용한다. if org_size",
"* self.ssize:(i + 1) * self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- #",
"<= ch <= 0x4840: if ch >= 0x4800: # 0x4800 - 0x483F #",
"엔진 초기화 self.handle = {} self.verbose = verbose return 0 # 플러그인 엔진",
"is invalid.') # 수정 모드 self.write_mode = write_mode # OLE 주요 데이터 self.mm",
"self.bsize * b_num for i in range(b_num): bbd_no.append(last_no) last_no += 1 # 최종",
"i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize) print kavutil.HexDump().Buffer(self.mm, (next_b+1) * self.bsize)",
"next_b = xbbd_start_block for i in range(seg): if next_b == 0xfffffffe: return -1",
"n = self.root_list_array[node / 4] buf = get_bblock(self.mm, n, self.bsize) off = ((node",
"= bbd[:no*4] + '\\xff\\xff\\xff\\xff' + bbd[(no+1)*4:] self.__modify_bbd(bbd) else: # 기존에는 SBD 사용 if",
"self.ssize:(i + 1) * self.ssize] + self.mm[off + self.ssize:] # --------------------------------------------------------------------- # OLE",
"ret # --------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize):",
"get_bblock(self.mm, no, self.bsize) self.sbd_fat = {} for i in range(len(self.sbd) / 4): n",
"bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) ''' # 상당히",
"# zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname = file_info.get_filename() a_name",
"len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link = [] for i in",
"= kavutil.get_uint32(buf, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize = 1 << kavutil.get_uint16(buf, 0x1e)",
"i in t[1:]: t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd",
"노드 값을 root로 보낸다. t_no = prev_no elif prev_no == 0xffffffff and next_no",
"압축 해제할 파일 이름 # 리턴값 : 압축 해제된 내용 or None #",
"not ch: continue else: # 0x3800 - 0x383F # the value contains two",
"수집하기 bbd = self.bbd for no in t_link: bbd = bbd[:no*4] + '\\xff\\xff\\xff\\xff'",
"0x1 == 0x1), 'encrypt': (val & 0x2 == 0x2), 'viewtext': (val & 0x4",
"else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next']) if self.pps[x]['Dir'] != 0xffffffff: if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] =",
"# --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. # num : 추가할 Big",
"0: return [] sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를",
"t, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) bbd_list_array = [] for i",
"= sbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] #",
"+ struct.pack('<L', start) + buf[t_off + 4:] if pps_prev is not None: t_off",
"모두 수집해서 한꺼번에 파일로 읽기 off = (s + 1) * self.read_size data",
"self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터 # 전체 BBD 링크를 구한다",
"체크 !!! bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) for i in range(len(bbd_list_array) /",
"= off + 0x4C buf = buf[:t_off] + struct.pack('<L', pps_dir) + buf[t_off +",
"Dec, Inc가 의미 없음 n = (len(data) / self.ssize) + (1 if (len(data)",
"if size is not None: t_off = off + 0x78 buf = buf[:t_off]",
"if old_link: ret_link = old_link + free_link[:add_num] # 최종 결과의 SBD 링크 t_link",
"n = len(sbd) % self.bsize if n: t = self.bsize - n sbd",
"(no == 0xffffffff and i < r_size / self.ssize)] if len(free_link) >= num:",
"# 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd",
"'%2d %-35s %d %22s %8d' % (self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS",
"--------------------------------------------------------------------- # OleWriteStream 클래스 # --------------------------------------------------------------------- class OleWriteStream: def __init__(self, mm, pps, bsize,",
"처리 (bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block) bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num =",
"if org_size >= 0x1000: # 기존에는 BBD 사용 if org_size >= len(data): #",
"(self.pps.index(p), tname, p['Type'], t, p['Size']) # PPS 전체 경로 구하기 self.__deep = 0",
"Storage') ''' print ' %-2s %-20s %4s %-8s %-8s %-8s %-8s %-8s' %",
"# if self.pps[node]['Type'] != 5: # Stream만 저장 p = {'Node': node, 'Name':",
"-> BBD') # 섹터가 변화는 것은 Dec, Inc가 의미 없음 n = (len(data)",
"'\\xE0\\xF5\\x6B\\x99\\x44\\x80\\x50\\x46\\xAD\\xEB\\x0B\\x01\\x39\\x14\\xE9\\x9C', '\\xE6\\x3F\\x83\\x66\\x83\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28', '\\x5F\\xDC\\x81\\x91\\x7D\\xE0\\x8A\\x41\\xAC\\xA6\\x8E\\xEA\\x1E\\xCB\\x8E\\x9E', '\\xB6\\x90\\x41\\xC7\\x89\\x85\\xD1\\x11\\xB1\\x6A\\x00\\xC0\\xF0\\x28\\x36\\x28' ] if pps[0x50:0x60] in cve_clsids: self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p)",
": arc_engine_id - 압축 가능 엔진 ID # arc_name - 최종적으로 압축될 압축",
"1) * bsize + (off * 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다.",
"zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try:",
"write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root 수정,",
"not None: t_off = off + 0x74 buf = buf[:t_off] + struct.pack('<L', start)",
"min(kavutil.get_uint16(pps, 0x40), 0x40) if t_size != 0: # 출력시 이름이 깨질 가능성이 큼",
"초기화 self.handle = {} self.verbose = verbose return 0 # 플러그인 엔진 초기화",
"self.exploit.append('Exploit.OLE.CVE-2012-0158') return False self.pps.append(p) # PPS Tree 검증 if self.__valid_pps_tree() is False: return",
"print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf) else: # 삭제 처리",
"구하기 # --------------------------------------------------------------------- def get_block_link(no, bbd_or_sbd_fat): ret = [] fat = bbd_or_sbd_fat next_b",
"if os.path.exists(rname): with open(rname, 'rb') as fp: buf = fp.read() # print '[-]",
"SBD 링크를 생성한다. sbd_link = [] for i in range(len(sbd) / 4): sbd_link.append(kavutil.get_uint32(sbd,",
"p['Name'] == name: return True else: return False # --------------------------------------------------------------------- # 스트림을 연다",
"체크하기 last_no = (size / self.bsize) - 2 # 실제 마지막 Big Block",
"엔진 ID # arc_name - 압축 파일 # fname_in_arc - 압축 해제할 파일",
"i in range(len(name) / 2): wch.append(kavutil.get_uint16(name, i * 2)) for ch in wch:",
"엔진 종료 return 0 # 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- # getinfo(self)",
"self.handle = {} self.verbose = verbose return 0 # 플러그인 엔진 초기화 성공",
"= self.ssize fat = self.sbd # org_list_array = get_block_link(org_sb, fat) ''' # 수정된",
"write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15) d",
"없음 n = (len(data) / self.bsize) + (1 if (len(data) % self.bsize) else",
"링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd = self.sbd",
"def read(self): pps = self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if size",
"%s\") % (node, self.deep, name) # if self.pps[node]['Type'] != 5: # Stream만 저장",
"self.root_list_array = root_list_array self.small_block = small_block def __get_root_node(self, node): # 해당 정보를 가진",
"t_link[i] = 0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = '' for",
"else 0) self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에",
"(True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name, file_infos): if arc_engine_id ==",
"링크를 모두 수집해서 한꺼번에 파일로 읽기 off = (s + 1) * self.read_size",
"ch in wch: if 0x3800 <= ch <= 0x4840: if ch >= 0x4800:",
"- 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return 0 #",
"self.__deep = 0 self.__full_list = [] try: self.__get_pps_path() except IndexError: pass # small",
"MsiBase64Encode(ch - 0x4800) if not ch: continue else: # 0x3800 - 0x383F #",
"kavutil.get_uint32(self.mm, 0x40) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) self.sbd = '' for no in sbd_list_array:",
"= len(t_data) / self.ssize # 몇개의 블록이 필요한가? self.__add_small_block_num(t_num) # 필요한 블록 수",
"len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전",
"self.bsize off += (n % 8) * self.ssize self.mm = self.mm[:off] + t_data[i",
"실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return 0 # 플러그인",
"= len(t_data) - (len(t_link) * self.bsize) t_num = (t_size / self.bsize) + (1",
"= False # CVE-2012-0158 검사하기 # pps에 ListView.2의 CLSID가 존재함 # 참고 :",
"양쪽 모두 노트가 존재함 # 1. prev 노드 값을 root로 보낸다. t_no =",
"None self.root = None self.pps = None self.small_block = None self.root_list_array = None",
"[i for i, no in enumerate(sbd_link) if (no == 0xffffffff)] if old_link: ret_link",
": 압축 성공 여부 (True or False) # --------------------------------------------------------------------- def mkarc(self, arc_engine_id, arc_name,",
"이전 링크 수집하기 t_num = 0 if (len(t_link) * self.bsize) < len(t_data): #",
"이름 # file_infos - 압축 대상 파일 정보 구조체 # 리턴값 : 압축",
"109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(self.mm, next_b, self.bsize)",
"num_link): if len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link = [] for",
"추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num): if add_num <",
"재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를 삭제한다. # --------------------------------------------------------------------- def delete(self, name,",
"p['Type'] = ord(pps[0x42]) p['Prev'] = kavutil.get_uint32(pps, 0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] =",
"- 파일 이름 # fileformat - 파일 포맷 분석 정보 # 리턴값 :",
"if n >= num: # 잔여 개수가 추가하려는 개수보다 많거나 같으면 추가 블록",
"0xffffffff else '%4d ' % p['Next'] t += ' - ' if p['Dir']",
"포맷 분석 정보 # 리턴값 : [[압축 엔진 ID, 압축된 파일 이름]] #",
"# 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- #",
"range(seg): if next_b == 0xfffffffe: return -1 t_buf = get_bblock(buf, next_b, bsize) next_b",
"False if self.verbose: print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s %-8s",
"처리해야 함 x_data = '' # b_data = '' # add_data = ''",
"self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return data[:size] def close(self): pass # -----------------------------------------------------------------",
"fat) ''' # 수정된 data를 쓰기 위해 준비한다 if len(data) >= 0x1000: #",
"ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose): self.verbose = verbose self.mm =",
"OLE 뒤에 첨부된 파일이 있는지를 조사한다. fsize = len(mm) bsize = 1 <<",
"and self.pps[0]['Type'] == 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0:",
"(no + 1) * self.bsize if len(data) == self.bsize: self.mm = self.mm[:off] +",
"== 5: f.append(self.pps[0]['Dir']) scaned_pps_node.append(self.pps[0]['Dir']) self.pps[0]['Valid'] = True if len(f) == 0: # 정상적인",
"= off + 0x74 buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off +",
"모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def write(self,",
"else: special_no = [] # 특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로",
"Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start",
"filename, fileformat) # 압축 파일 내부의 파일 목록을 얻는다. # 입력값 : filename",
"info['make_arc_type'] = kernel.MASTER_PACK # 악성코드 치료 후 재압축 유무 info['sig_num'] = len(self.listvirus()) #",
"return zfile # --------------------------------------------------------------------- # arclist(self, filename, fileformat) # 압축 파일 내부의 파일",
"+= get_bblock(self.mm, no, self.bsize) if self.verbose: open('root.dmp', 'wb').write(self.root) print kavutil.vprint('ROOT') kavutil.vprint(None, 'Start Blocks',",
"False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm, self.verbose) '''",
"def get_bblock(buf, no, bsize): off = (no+1) * bsize return buf[off:off+bsize] # ---------------------------------------------------------------------",
"--------------------------------------------------------------------- def unarc(self, arc_engine_id, arc_name, fname_in_arc): data = None if arc_engine_id == 'arc_ole':",
"self.__modify_small_block_link(None, t_num) bbd_list_array, _, _, _ = get_bbd_list_array(self.mm) self.bbd = '' for i",
"if self.pps[x]['Dir'] in scaned_pps_node: self.pps[x]['Dir'] = 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True #",
"get_bblock(self.mm, n, self.bsize) # 새로운 Small Block 링크가 필요하다 self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat)",
"특정 데이터를 small block 링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self,",
"* 4] + '\\xfe\\xff\\xff\\xff' + sbd[(no + 1) * 4:] # SBD가 나누어",
"{파일 포맷 분석 정보} or None # --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex):",
"= 0xffffffff else: f.append(self.pps[x]['Dir']) scaned_pps_node.append(self.pps[x]['Dir']) return True # --------------------------------------------------------------------- # PPS 전체 경로",
"org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝 설정하기 # 남은 링크는 모두 0xffffffff로",
"self.sbd = '' for i in t_link: self.sbd += struct.pack('<L', i) # self.mm에",
"read_size = self.bsize fat = self.bbd else: # read_size = self.ssize fat =",
"b_num = (add_num / (self.bsize / 4)) + (1 if (add_num % (self.bsize",
"기존에는 SBD 사용 if org_size >= len(data): # raise error('Not Support : SBD",
"# 몇개의 블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD 링크를",
"zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname = file_info.get_filename() a_name =",
"* self.bsize) < len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link)",
"# 특수 블록에 BBD list도 추가 special_no += bbd_no # 특수 블록 처리",
"bbd = '' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(bbd_list_array, i*4) bbd +=",
"self.__full_list = None self.init(buf) def init(self, buf): # OLE 주요 데이터 self.mm =",
"한다. # 인력값 : plugins_path - 플러그인 엔진의 위치 # verbose - 디버그",
"else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기 # 수집된 마지막 링크 이후에",
"arc_engine_id - 압축 엔진 ID # arc_name - 압축 파일 # fname_in_arc -",
"if p['Name'] == name: no = p['Node'] break else: no = -1 if",
"# --------------------------------------------------------------------- def format(self, filehandle, filename, filename_ex): ret = {} mm = filehandle",
"name): for p in self.__full_list: if p['Name'] == name: return True else: return",
"추출하기 o = self.__get_handle(filename) for name in o.listdir(): file_scan_list.append(['arc_ole', name]) return file_scan_list except:",
"break else: no = -1 if no == -1: raise Error('PPS name is",
":', rname, len(buf) # print '[-] rname :', o.write_stream(a_name, buf) # zfile.writestr(a_name, buf)",
"찾는다. free_link = [i for i, no in enumerate(bbd_link) if (no == 0xffffffff",
"줄이기 # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기",
"# uninit(self) # 플러그인 엔진을 종료한다. # 리턴값 : 0 - 성공, 0",
"0) x_num = total_xbbd_num - num_of_xbbd_blocks # 추가해야 할 XBBD 개수 add_num +=",
"((n * self.bsize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 t_link =",
"self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1",
"= '' for no in sbd_list_array: self.sbd += get_bblock(self.mm, no, self.bsize) self.sbd_fat =",
"= self.mm[:size] # 뒤쪽 쓸모 없는 부분은 제거 attach_data = self.mm[size:] # 파일",
"0 if (len(t_link) * self.bsize) < len(t_data): # 블록 추가해야 하나? t_size =",
"def __decrease_bbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # BBD를 배열로 바꾸기 t_link",
"print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array =",
"len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) *",
"# init(self, plugins_path) # 플러그인 엔진을 초기화 한다. # 인력값 : plugins_path -",
"ord(pps[0]) & 0xF0 == 0x00 and ord(pps[1]) == 0x00: name = '_\\x00' +",
"가진 node를 찾기 no = node while True: pps = self.pps[no] if pps['Next']",
"signature') # big block, small bloc 크기 구하기 self.bsize = 1 << kavutil.get_uint16(self.mm,",
"self.fp = open(input_data, 'rb') buf = self.fp.read() else: buf = input_data else: raise",
"OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps =",
"sbd = self.sbd if self.verbose: open('sbd.dm2', 'wb').write(sbd) # SBD 링크를 생성한다. sbd_link =",
"verbose=True) # zfile = zipfile.ZipFile(arc_name, 'w') for file_info in file_infos: rname = file_info.get_filename()",
"있음 b_num = (add_num / (self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else",
"유효한 Tree가 아니면 다음 continue t = '' t += ' - '",
"self.small_block, self.verbose) t = ow.write(no, data) if t: self.init(t) # 새롭게 OLE 재로딩",
"> 63: return None return ord(ct[x]) def DecodeStreamName(name): wch = [] och =",
"리턴값 : 0 - 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def",
"fsize = len(mm) bsize = 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize /",
"0xffffffff else '%8X ' % p['Start'] tname = p['Name'].encode(sys.stdout.encoding, 'replace') print ' '",
"if (t_idx % ((bsize / 4) - 1)) else 0) off = (t_idx",
"self.bbd = bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat self.root_list_array",
"self.bbd_fat) # 이전 링크 수집하기 t_num = 0 if (len(t_link) * self.bsize) <",
"(off * 4) # --------------------------------------------------------------------- # OLE 파일인지 확인한다. # --------------------------------------------------------------------- def is_olefile(filename):",
"처리 return False # bbd 읽기 self.bbd_list_array, num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block = \\ get_bbd_list_array(self.mm,",
"off += (n % 8) * self.ssize self.mm = self.mm[:off] + t_data[i *",
"self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep -= 1 if self.pps[node]['Prev'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next']",
"수 추가하기 # SBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_small_block_link(None,",
"del_info: buf = buf[:off] + '\\x00' * 0x80 + buf[off+0x80:] if size is",
"if t: self.init(t) # 새롭게 OLE 재로딩 # --------------------------------------------------------------------- # 스트림 또는 스토리지를",
"= self.mm[size:] # 파일 뒤에 붙어 있는 잔여 데이터 # 전체 BBD 링크를",
"= kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks = kavutil.get_uint32(self.mm, 0x48) # 추가적인 Big Block을 계산한다. BBD",
"값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm def",
"존재함... return # 추가할 필요 없음 # 잔여 개수 체크하기 last_no = (size",
"t_data[i * self.bsize:(i + 1) * self.bsize] + self.mm[off + self.bsize:] # ---------------------------------------------------------------------",
"Inc가 의미 없음 n = (len(data) / self.ssize) + (1 if (len(data) %",
"self.bsize) else 0) t_data = data + ('\\x00' * ((n * self.bsize) -",
"len(data) >= 0x1000: # BBD를 사용한다. if org_size >= 0x1000: # 기존에는 BBD",
"'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL: self.__deep += 1 self.__get_pps_path(self.pps[node]['Dir'], name) self.__deep",
"buf[t_off + 4:] if pps_prev is not None: t_off = off + 0x44",
"<= 109: return 0x4c + (idx * 4) else: t_idx = idx -",
"self.__modify_bbd(bbd) return ret_link # 연결된 링크 # --------------------------------------------------------------------- # SBD link 추가 요청한다.",
"def __init__(self, mm, pps, bsize, ssize, bbd, bbd_fat, sbd, sbd_fat, root_list_array, small_block, verbose):",
"4)) t = org_link_list[num_link:] org_link_list = org_link_list[:num_link] t_link[t[0]] = 0xfffffffe # 링크 끝",
"# add_num : 추가 BBD link 개수 # --------------------------------------------------------------------- def __modify_big_block_link(self, old_link, add_num):",
"__init__(self, parent, node): self.parent = parent self.node = node self.read_size = 0 self.fat",
"self.__modify_small_block_link(t_link, t_num) # Small block 갱신 self.bbd_fat = {} for i in range(len(self.bbd)",
"def uninit(self): # 플러그인 엔진 종료 return 0 # 플러그인 엔진 종료 성공",
"- 파일 포맷 분석 정보 # 리턴값 : [[압축 엔진 ID, 압축된 파일",
"* self.bsize off += (n % 8) * self.ssize self.mm = self.mm[:off] +",
"except: data = None return data # --------------------------------------------------------------------- # arcclose(self) # 압축 파일",
"import kernel import kavutil # ------------------------------------------------------------------------- # 메시지 출력 함수 # ------------------------------------------------------------------------- __version__",
"__write_data_to_big_block(self, t_data, t_link): for i, n in enumerate(t_link): off = (n + 1)",
"verbose=True) pics = o.openstream('FileHeader') d = pics.read() d = d + d o.write_stream('FileHeader',",
"# 입력값 : filename - 파일 이름 # fileformat - 파일 포맷 분석",
"OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' # 늘어나는건 경우의 수가 너무 많음 o",
"Root 크기 수정 self.__set_pps_header(0, size=r_size + add_big_num * self.bsize) # --------------------------------------------------------------------- # BBD",
"link가 수정 됨) # old_link : 기존 BBD link # add_num : 추가",
"pps['Next'] == del_no: self.__set_pps_header(root_no, pps_next=t_no) else: # Dir self.__set_pps_header(root_no, pps_dir=t_no) # 삭제 노드",
"org_list_array = get_block_link(org_sb, fat) ''' # 수정된 data를 쓰기 위해 준비한다 if len(data)",
"= n self.small_block = get_block_link(self.pps[0]['Start'], self.bbd_fat) # Small block 영역에 ssize 만큼씩 Overwrite",
"try: pics = o.openstream('FileHeader') d = pics.read() if d[:0x11] == 'HWP Document File':",
"else: for n in list_array: div_n = self.parent.bsize / self.parent.ssize off = (self.parent.small_block[n",
"and target_pps['Type'] == 2: # 유효한 PPS에 대한 삭제인지 확인 if reset_stream: size",
"= o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d, -15) d = d.replace(b'v\\x00a\\x00r', b'f\\x00o\\x00o')",
"0 - 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def uninit(self): #",
"# PPS 전체 경로 구하기 self.__deep = 0 self.__full_list = [] try: self.__get_pps_path()",
"fname in self.handle.keys(): zfile = self.handle[fname] zfile.close() self.handle.pop(fname) # --------------------------------------------------------------------- # mkarc(self, arc_engine_id,",
"= open(filename, 'rb').read(8) if buf == 'D0CF11E0A1B11AE1'.decode('hex'): return True except IOError: pass return",
"필요한 데이터 블록 self.mm += attach_data else: special_no = [] # 특수 목적의",
"# 추가해야 할 용량 add_big_num = (size / self.bsize) + (1 if (size",
"+ 1) * self.bsize self.mm = self.mm[:off] + data + self.mm[off+self.bsize:] # ---------------------------------------------------------------------",
"새로운 XBBD 링크 추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off = (next_b",
"len(t_data): # 블록 추가해야 하나? t_size = len(t_data) - (len(t_link) * self.bsize) t_num",
"t_link[i] = 0xffffffff # BBD 배열을 BBD 버퍼로 바꾸기 self.bbd = '' for",
"t_num) # Small block 갱신 self.bbd_fat = {} for i in range(len(self.bbd) /",
"o = OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d =",
"# 파일 뒤에 붙어 있는 잔여 데이터 # 전체 BBD 링크를 구한다 bbd_list_array,",
"마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num) #",
"0x44) p['Next'] = kavutil.get_uint32(pps, 0x48) p['Dir'] = kavutil.get_uint32(pps, 0x4c) p['Start'] = kavutil.get_uint32(pps, 0x74)",
"self.__get_pps_path(self.pps[node]['Prev'], prefix) if self.pps[node]['Next'] != 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- #",
"블록은 0xfffffffd로 처리해야 함 x_data = '' # b_data = '' # add_data",
"data + self.mm[0x4C:] else: data = struct.pack('<L', total_xbbd_num) self.mm = self.mm[:0x48] + data",
"if self.pps[x]['Type'] != 1 and self.pps[x]['Type'] != 2 and len(self.pps[x]['Name']) == 0: continue",
"'%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks) if num_of_bbd_blocks",
"Wipe t = ow.delete(no) if t: self.init(t) # 새롭게 OLE 재로딩 elif target_pps['Valid']",
"0x20) if self.verbose: kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small",
"링크를 구한다 bbd_list_array, num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) # BBD를 모은다 bbd =",
"self.__modify_sbd(sbd) # 수정된 SDB 적용하기 return ret_link # 연결된 링크 # --------------------------------------------------------------------- #",
"링크는 모두 0xffffffff로 설정하기 for i in t[1:]: t_link[i] = 0xffffffff # BBD",
"= get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link, n) # 필요한",
"정보 구조체 # 리턴값 : 압축 성공 여부 (True or False) # ---------------------------------------------------------------------",
"------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain: # --------------------------------------------------------------------- # init(self, plugins_path)",
"(self.bsize / 4)) else 0) if old_b_num == b_num: break else: old_b_num =",
"decoded ch = MsiBase64Encode(ch - 0x4800) if not ch: continue else: # 0x3800",
"(t_size / self.bsize) + (1 if (t_size % self.bsize) else 0) self.__add_big_block_num(t_num) #",
"PPS Tree의 유효성을 체크한다. (내장) # --------------------------------------------------------------------- def __valid_pps_tree(self): scaned_pps_node = [0] #",
"추가할 Big Block 개수 # --------------------------------------------------------------------- def __add_small_block_num(self, num): root = self.pps[0] r_size",
"* self.bsize) + (off * 4) self.mm = self.mm[:t_off] + '\\xfd\\xff\\xff\\xff' + self.mm[t_off+4:]",
"'XBBD Start', '%08X' % xbbd_start_block) kavutil.vprint(None, 'Num of XBBD Blocks', '%d' % num_of_xbbd_blocks)",
"in t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD 적용하기 sbd_startblock = kavutil.get_uint32(self.mm,",
"이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_num =",
"+= '\\xfe\\xff\\xff\\xff' # 마지막 블록의 링크는 끝을 처리함 special_no.append(last_no) # 특수 블록 등록",
"SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no",
"= kavutil.get_uint32(bbd_list_array, i*4) bbd += get_bblock(self.mm, no, self.bsize) bbd_link = [] for i",
"total_xbbd_num = (t_num / ((self.bsize - 4) / 4)) + (1 if (t_num",
"# --------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off",
"pps[2:t_size-2] else: name = pps[0:t_size-2] p['Name'] = DecodeStreamName(name).decode('UTF-16LE', 'replace') else: p['Name'] = ''",
"# unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id - 압축 엔진 ID",
"추가 t_data = t_data[:-4] + struct.pack('<L', last_no) off = (next_b + 1) *",
"kavutil.vprint('Header') kavutil.vprint(None, 'Big Block Size', '%d' % self.bsize) kavutil.vprint(None, 'Small Block Size', '%d'",
"= target_pps['Size'] t = ow.write(no, '\\x00' * size) # 모든 데이터를 0으로 Wipe",
"if num_of_bbd_blocks > 109: # bbd list 개수가 109보다 크면 xbbd를 가져와야 함",
"+ b_num > 109: t_num = (old_num_bbd + b_num - 109) total_xbbd_num =",
"0x1000: # 기존에는 BBD 사용 if org_size >= len(data): # raise error('Not Support",
"buf = self.mm[:8] if buf != 'D0CF11E0A1B11AE1'.decode('hex'): raise Error('Not Ole signature') # big",
"prefix=''): if node == 0: pps_name = '' name = prefix + pps_name",
"0xffffffff # SBD 배열을 SBD 버퍼로 바꾸기 self.sbd = '' for i in",
"OleFile('normal.hwp', write_mode=True, verbose=True) o = OleFile('a82d381c20cfdf47d603b4b2b840136ed32f71d2757c64c898dc209868bb57d6', write_mode=True, verbose=True) print o.listdir() o.delete('_VBA_PROJECT_CUR/VBA') # Root",
"= free_link[:add_num] # 최종 결과의 BBD 링크 t_link = free_link[:add_num] # BBD에 링크",
"(1 if (size % self.bsize) else 0) # 추가해야 할 Big Block 개수",
"--------------------------------------------------------------------- # OLE 블록 읽기 # --------------------------------------------------------------------- def get_bblock(buf, no, bsize): off =",
"for i, n in enumerate(sbd_list_array): self.__set_bblock(n, self.sbd[i*self.bsize:(i+1)*self.bsize]) return org_link_list elif len(org_link_list) == num_link:",
"True if self.pps[x]['Prev'] != 0xffffffff: if self.pps[x]['Prev'] in scaned_pps_node: self.pps[x]['Prev'] = 0xffffffff else:",
"= OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps",
": SBD -> SBD (Inc)') # 작업 완료 n = (len(data) / self.ssize)",
"사용한다. if org_size >= 0x1000: # 기존에는 BBD 사용 # raise error('Not Support",
"= '' for i in t_link: self.sbd += struct.pack('<L', i) # self.mm에 SBD",
"= (self.small_block[n / 8] + 1) * self.bsize off += (n % 8)",
"뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link = self.__decrease_sbd_link(t_link,",
"보낸다. t_no = next_no else: # prev_no == 0xffffffff and next_no == 0xffffffff:",
"Big Block을 계산한다. BBD List와 XBBD 블록도 추가될 수 있기 때문에... old_b_num =",
"= prev_no # 2. prev 노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를",
"bb_num = (self.bsize/4) # 한개의 BBD list 블록에 들어갈 수 있는 Big Block",
"def __modify_small_block_link(self, old_link, add_num): if add_num < 0: return [] sbd = self.sbd",
"self.__full_list: if p['Name'] == name: return True else: return False # --------------------------------------------------------------------- #",
"'' for i in range(num_of_bbd_blocks): no = kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no,",
"블록도 추가될 수 있기 때문에... old_b_num = b_num while True: if old_num_bbd +",
"# 여유분이 충분히 존재함... return # 추가할 필요 없음 # 잔여 개수 체크하기",
"kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock = kavutil.get_uint32(self.mm, 0x30) root_list_array = get_block_link(root_startblock,",
"값 - 실패 # --------------------------------------------------------------------- def uninit(self): # 플러그인 엔진 종료 return 0",
"진단/치료 가능한 악성코드 수 return info # --------------------------------------------------------------------- # listvirus(self) # 진단/치료 가능한",
"수정된 SBD 이미지 # --------------------------------------------------------------------- def __modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기",
"o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 # ------------------------------------------------------------------------- class KavMain:",
"' - ' if p['Prev'] == 0xffffffff else '%4d ' % p['Prev'] t",
"노트가 존재함 # 1. prev 노드 값을 root로 보낸다. t_no = prev_no #",
"# arclist(self, filename, fileformat) # 압축 파일 내부의 파일 목록을 얻는다. # 입력값",
"range(len(bbd_list_array)/4): n = kavutil.get_uint32(bbd_list_array, i*4) self.bbd += get_bblock(self.mm, n, self.bsize) # 새로운 Small",
"= kavutil.get_uint32(self.mm, 0x3c) sbd_list_array = get_block_link(sbd_startblock, self.bbd_fat) for i, n in enumerate(sbd_list_array): self.__set_bblock(n,",
"num_link): if len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link = [] for",
"next 노드 값을 root로 보낸다. t_no = next_no else: # prev_no == 0xffffffff",
"self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array, self.small_block, self.verbose) target_pps = self.pps[no]",
"self.mm[off + self.bsize:] # --------------------------------------------------------------------- # 특정 데이터를 small block 링크를 따라 데이터",
"self.fp.close() if self.write_mode: open(self.fname, 'wb').write(self.mm) # --------------------------------------------------------------------- # OLE 파싱하기 # --------------------------------------------------------------------- def",
"xbbd를 가져와야 함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf,",
"if 0x3800 <= ch <= 0x4840: if ch >= 0x4800: # 0x4800 -",
"= 1 << kavutil.get_uint16(mm, 0x1e) rsize = (fsize / bsize) * bsize if",
"self.__modify_big_block_link(None, t_num) # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS",
"(len(self.mm) / self.bsize) * self.bsize # 파일 크기 self.mm = self.mm[:size] # 뒤쪽",
"self.verbose) t = ow.write(no, data) if t: self.init(t) # 새롭게 OLE 재로딩 #",
"(1 if (t_size % self.ssize) else 0) self.__add_small_block_num(t_num) # 필요한 블록 수 추가하기",
"num_link: return org_link_list else: raise Error('Invalid call') # --------------------------------------------------------------------- # Big Block을 주어진",
"True if len(f) == 0: # 정상적인 PPS가 없음 return False while len(f):",
"링크를 줄인다 # org_link_list : 기존 Small block 링크 # num_link : 필요로",
"print no ow = OleWriteStream(self.mm, self.pps, self.bsize, self.ssize, self.bbd, self.bbd_fat, self.sbd, self.sbd_fat, self.root_list_array,",
"# 2. prev 노드 하위에 next가 없는 node를 찾아서 del_pps의 next_no를 등록한다. blank_next_no",
"' Dir', 'SB', 'Size') print ' ' + ('-' * 74) for p",
"(내장) # --------------------------------------------------------------------- def __set_bblock(self, no, data): off = (no + 1) *",
"else: kavutil.HexDump().Buffer(self.mm, 0x4c, num_of_bbd_blocks * 109) next_b = xbbd_start_block for i in range(num_of_xbbd_blocks):",
"special_no.append(last_no) # 특수 블록 등록 last_no += 1 # END of XBBD #",
"!= 0xFFFFFFFFL: self.__get_pps_path(self.pps[node]['Next'], prefix) return 0 # --------------------------------------------------------------------- # PPS 전체 경로 구하기",
"self.pps[no] if pps['Next'] == 0xffffffff: # 더이상 오른쪽이 없으면 탐색 종료 break else:",
"링크를 따라 데이터 쓰기 (내장) # --------------------------------------------------------------------- def __write_data_to_small_bolck(self, t_data, t_link): for i,",
"109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4)) + (1 if",
"'ignore') name = prefix + '/' + pps_name # print (\"%02d : %d",
"플러그인 엔진 종료 return 0 # 플러그인 엔진 종료 성공 # --------------------------------------------------------------------- #",
"= kavutil.get_uint32(self.bbd_list_array, i*4) self.bbd += get_bblock(self.mm, no, self.bsize) self.bbd_fat = {} for i",
"__decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link =",
"= self.parent.pps[self.node] sb = pps['Start'] size = pps['Size'] if size >= 0x1000: self.read_size",
": 0 - 성공, 0 이외의 값 - 실패 # --------------------------------------------------------------------- def init(self,",
"= OleFile('normal.hwp', write_mode=True, verbose=True) pics = o.openstream('Scripts/DefaultJScript') d = pics.read() d = zlib.decompress(d,",
"p = {'Node': node, 'Name': name[1:], 'Type': self.pps[node]['Type']} self.__full_list.append(p) if self.pps[node]['Dir'] != 0xFFFFFFFFL:",
"수집된 마지막 링크 이후에 존재하는 사용하지 않는 블록을 수집한다. t_link = self.__modify_small_block_link(t_link, t_num)",
"self.bbd) # 이전 Small Block의 링크를 구함 t_link = get_block_link(r_no, self.bbd_fat) # 이전",
"verbose - 디버그 모드 (True or False) # 리턴값 : 0 - 성공,",
"# 이전 링크 수집하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 sbd",
"XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() ''' #",
"self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in range(len(bbd) /",
"x_data += '\\xff\\xff\\xff\\xff' * ((self.bsize/4) - 1) if i != (x_num-1): x_data +=",
"이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist # --------------------------------------------------------------------- # format(self, filehandle, filename,",
"/ bb_num off = no % bb_num # print hex(no), hex(seg), hex(off), hex(kavutil.get_uint32(bbd_list_array,",
"False else: # CVE-2003-0347 취약점 self.exploit.append('Exploit.OLE.CVE-2003-0347') return False self.pps[x]['Valid'] = True if self.pps[x]['Prev']",
"크기를 data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 t_link",
"def __add_big_block_num(self, num): size = (len(self.mm) / self.bsize) * self.bsize # 파일 크기",
"4:] if pps_dir is not None: t_off = off + 0x4C buf =",
"# XBBD 늘어나는 경우 # o = OleFile('xbbd2.ppt', write_mode=True, verbose=True) # o.test() '''",
"return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 : arc_engine_id",
"size=len(data), start=t_link[0]) # 이전 SBD의 링크는 모두 삭제한다. # t_link = get_block_link(org_sb, self.sbd)",
"링크 수집하기 t_link = self.__decrease_bbd_link(t_link, n) # 필요한 개수로 링크 줄이기 # Big",
"= self.pps[node]['Name'].encode('cp949', 'ignore') name = prefix + '/' + pps_name # print (\"%02d",
"block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) # PPS 크기 수정, start 블록",
"if start is not None: t_off = off + 0x74 buf = buf[:t_off]",
"# 취약점 존재 여부 # 임시 변수 self.__deep = None self.__full_list = None",
"많거나 같으면 추가 블록 개수만 파일 뒤에 추가하기 self.mm += '\\x00' * self.bsize",
"o = OleFile(filename) try: pics = o.openstream('FileHeader') d = pics.read() if d[:0x11] ==",
"조정한다. (내장) # node : PPS 인덱스 # size : 설정 크기 #",
"off = (no + 1) * self.bsize self.mm = self.mm[:off] + data +",
"PPS 크기 수정, start 블록 수정 self.__set_pps_header(no, size=len(data), start=t_link[0]) # 이전 BBD의 링크는",
"t_data + self.mm[off + self.bsize:] # XBBD 생성하기 for i in range(x_num): x_data",
"== 0: data = struct.pack('<LL', last_no, total_xbbd_num) self.mm = self.mm[:0x44] + data +",
"특수 목적의 Big Block 번호. 해당 블록은 0xfffffffd로 처리해야 함 x_data = ''",
"num_of_bbd_blocks, _, _ = get_bbd_list_array(self.mm) bb_num = (self.bsize/4) # 한개의 BBD list 블록에",
"링크 줄이기 # Small block 영역에 ssize 만큼씩 Overwrite self.__write_data_to_small_bolck(t_data, t_link) # PPS",
"bbd_list_array = [] for i in range(len(t) / 4): bbd_list_array.append(kavutil.get_uint32(t, i * 4))",
"t += ' - ' if p['Next'] == 0xffffffff else '%4d ' %",
"# --------------------------------------------------------------------- # getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...)",
"연속된 링크를 모두 수집해서 한꺼번에 파일로 읽기 off = (s + 1) *",
"t_link = get_block_link(org_sb, self.bbd_fat) # 이전 링크 수집하기 bbd = self.bbd for no",
"__get_handle(self, filename): if filename in self.handle: # 이전에 열린 핸들이 존재하는가? zfile =",
"if (no == 0xffffffff and i < r_size / self.ssize)] if len(free_link) >=",
"Error(Exception): pass # --------------------------------------------------------------------- # MisiBase64 인코더 디코더 # --------------------------------------------------------------------- def MsiBase64Encode(x): ct",
"# 삭제 처리 o.delete(a_name) except IOError: # print file_info.get_filename_in_archive() pass o.close() # zfile.close()",
"no, self.bsize) if self.verbose: open('bbd.dm2', 'wb').write(bbd) bbd_link = [] for i in range(len(bbd)",
"data += self.parent.mm[off:off + self.read_size] if self.parent.verbose: print kavutil.vprint(pps['Name']) kavutil.HexDump().Buffer(data, 0, 80) return",
"가져와야 함 next_b = xbbd_start_block for i in range(num_of_xbbd_blocks): t_data = get_bblock(buf, next_b,",
"# 작업 완료 n = (len(data) / self.ssize) + (1 if (len(data) %",
"'' # b_data = '' # add_data = '' add_num = num -",
"__valid_pps_tree(self): scaned_pps_node = [0] # 이미 분석한 노드의 경우 더이상 분석하지 않기 위해",
"t = '' t += ' - ' if p['Prev'] == 0xffffffff else",
"BBD 링크를 처음 생성하므로 이전 링크가 없다. t_link = self.__modify_big_block_link(None, t_num) # Big",
"== 0: pps_name = '' name = prefix + pps_name else: if self.pps[node]['Valid']",
"del_pps['Dir'] # root를 찾기 root_no = self.__get_root_node(del_no) # 양쪽 노드가 존재하는가? if prev_no",
"pass return [] # --------------------------------------------------------------------- # unarc(self, arc_engine_id, arc_name, fname_in_arc) # 입력값 :",
"start=None, pps_prev=None, pps_next=None, pps_dir=None, del_info=False): n = self.root_list_array[node / 4] buf = get_bblock(self.mm,",
"값임 no = pps['Next'] return no def delete(self, del_no): del_pps = self.pps[del_no] prev_no",
"blank_next_no = self.__get_max_node(prev_no) self.__set_pps_header(blank_next_no, pps_next=next_no) elif prev_no != 0xffffffff and next_no == 0xffffffff:",
"__modify_sbd(self, sbd): # 원래 이미지에 SBD 덮어쓰기 sbd_no = kavutil.get_uint32(self.mm, 0x3c) # sbd_list_array",
"노드 값은 모두 지우기 self.__set_pps_header(del_no, size=0, start=0xffffffff, pps_prev=0xffffffff, pps_next=0xffffffff, pps_dir=0xffffffff, del_info=True) return self.mm",
"in file_infos: rname = file_info.get_filename() a_name = file_info.get_filename_in_archive() try: if os.path.exists(rname): with open(rname,",
"블록이 필요한가? self.__add_big_block_num(t_num) # 필요한 블록 수 추가하기 # BBD 링크를 처음 생성하므로",
"ret = [] fat = bbd_or_sbd_fat next_b = no if next_b != 0xfffffffe:",
"파일 핸들 # --------------------------------------------------------------------- def __get_handle(self, filename): if filename in self.handle: # 이전에",
"= get_block_link(org_sb, self.bbd) # 이전 링크 수집하기 t_link = get_block_link(org_sb, self.bbd_fat) # 이전",
"= None if not start: start = num_list.pop(0) e = start loop =",
"elif prev_no == 0xffffffff and next_no != 0xffffffff: # Next만 존재 # 1.",
"d = zlib.compress(d)[2:] o.write_stream('Scripts/DefaultJScript', d) o.close() ''' # ------------------------------------------------------------------------- # KavMain 클래스 #",
"<< kavutil.get_uint16(buf, 0x1e) if verbose: kavutil.vprint(None, 'Num of BBD Blocks', '%d' % num_of_bbd_blocks)",
"def __decrease_sbd_link(self, org_link_list, num_link): if len(org_link_list) > num_link: # SBD를 배열로 바꾸기 t_link",
"# sbd 읽기 sbd_startblock = kavutil.get_uint32(self.mm, 0x3c) num_of_sbd_blocks = kavutil.get_uint32(self.mm, 0x40) sbd_list_array =",
"bbd_list_array += t_data[:-4] next_b = kavutil.get_uint32(t_data, bsize-4) return bbd_list_array[:num_of_bbd_blocks*4], num_of_bbd_blocks, num_of_xbbd_blocks, xbbd_start_block #",
"x_data += struct.pack('<L', last_no+1) # 다음 블록을 가리켜야 함으로 1를 더함 else: x_data",
"self.bsize = 1 << kavutil.get_uint16(self.mm, 0x1e) self.ssize = 1 << kavutil.get_uint16(self.mm, 0x20) if",
"getinfo(self) # 플러그인 엔진의 주요 정보를 알려준다. (제작자, 버전, ...) # 리턴값 :",
"self.pps[x]['Next'] != 0xffffffff: if self.pps[x]['Next'] in scaned_pps_node: self.pps[x]['Next'] = 0xffffffff else: f.append(self.pps[x]['Next']) scaned_pps_node.append(self.pps[x]['Next'])",
"# num_link : 필요로 하는 전체 링크 수 # --------------------------------------------------------------------- def __decrease_sbd_link(self, org_link_list,",
"data 뒤쪽에 추가하기 t_link = get_block_link(org_sb, self.sbd_fat) # 이전 링크 수집하기 t_link =",
"(self.bsize/4)) + (1 if (add_num % (self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c)",
"생성한다. sbd_link = [] for i in range(len(self.sbd) / 4): sbd_link.append(kavutil.get_uint32(self.sbd, i*4)) #",
"self.mm[off+4:]) # --------------------------------------------------------------------- # Small Block을 주어진 개수만큼 추가한다. # num : 추가할",
"개수로 링크 줄이기 # Big block 영역에 bsize 만큼씩 Overwrite self.__write_data_to_big_block(t_data, t_link) #",
"(self.bsize/4)) else 0) old_num_bbd = kavutil.get_uint32(self.mm, 0x2c) xbbd_start_block = kavutil.get_uint32(self.mm, 0x44) num_of_xbbd_blocks =",
"# 유효한 Tree가 아니면 다음 continue t = '' t += ' -",
"for i, no in enumerate(sbd_list_array): data = sbd[i*self.bsize:(i+1)*self.bsize] off = (no + 1)",
"'Attached_Size': fsize - rsize } ret['ff_attach'] = fileformat # HWP 인가? o =",
"# SBD를 배열로 바꾸기 t_link = [] for i in range(len(self.sbd) / 4):",
"n = (len(self.bbd) / 4 - 1) - last_no if n >= num:",
"in range(len(bbd_list_array) / 4): no = kavutil.get_uint32(bbd_list_array, i * 4) data = bbd[i",
"def listvirus(self): # 진단 가능한 악성코드 리스트 vlist = list() # 리스트형 변수",
"root로 보낸다. t_no = prev_no elif prev_no == 0xffffffff and next_no != 0xffffffff:",
"(add_num % (self.bsize / 4)) else 0) if old_b_num == b_num: break else:",
"i*4) self.sbd_fat[i] = n if self.verbose: open('sbd.dmp', 'wb').write(self.sbd) print kavutil.vprint('SBD') kavutil.vprint(None, 'Start Blocks',",
"+ ('\\x00' * ((n*self.ssize) - len(data))) # 여분의 크기를 data 뒤쪽에 추가하기 #",
"t_off = off + 0x74 buf = buf[:t_off] + struct.pack('<L', start) + buf[t_off",
"+ d o.write_stream('FileHeader', d) o.close() ''' ''' # case1 o = OleFile('normal.hwp', write_mode=True,",
"filehandle # OLE 헤더와 동일 if mm[:8] == '\\xD0\\xCF\\x11\\xE0\\xA1\\xB1\\x1A\\xE1': ret['ff_ole'] = 'OLE' #",
"# 모든 데이터를 0으로 Wipe t = ow.delete(no) if t: self.init(t) # 새롭게",
"ssize self.bbd = bbd self.bbd_fat = bbd_fat self.sbd = sbd self.sbd_fat = sbd_fat",
"= (total_bbd_num - 109) total_xbbd_num = (t_num / ((self.bsize - 4) / 4))",
"- n sbd += '\\xff' * t if self.verbose: open('sbd.dm3', 'wb').write(sbd) self.__modify_sbd(sbd) #",
"--------------------------------------------------------------------- def get_bbd_list_array(buf, verbose=False): bbd_list_array = buf[0x4c:0x200] # 전체 bbd_list num_of_bbd_blocks = kavutil.get_uint32(buf,",
"kavutil.get_uint32(pps, 0x74) p['Size'] = kavutil.get_uint32(pps, 0x78) p['Valid'] = False # CVE-2012-0158 검사하기 #",
"name, delete_storage=False, reset_stream=False): for p in self.__full_list: if p['Name'] == name: no =",
"= kavutil.get_uint32(buf, 0x2c) xbbd_start_block = kavutil.get_uint32(buf, 0x44) # num_of_xbbd_blocks = kavutil.get_uint32(buf, 0x48) bsize",
">= num_of_bbd_blocks: # 범위를 벗어나면 에러 return -1 if idx <= 109: return",
"print kavutil.vprint('Property Storage') ''' print ' %-2s %-20s %4s %-8s %-8s %-8s %-8s",
"in range(len(self.sbd) / 4): t_link.append(kavutil.get_uint32(self.sbd, i * 4)) t = org_link_list[num_link:] org_link_list =",
"def DecodeStreamName(name): wch = [] och = [] for i in range(len(name) /",
"no, data): # 기존 PPS 정보를 얻는다 org_sb = self.pps[no]['Start'] org_size = self.pps[no]['Size']",
"Big Block을 담을 수 있음 b_num = (add_num / (self.bsize/4)) + (1 if",
"data + ('\\x00' * ((n * self.ssize) - len(data))) # 여분의 크기를 data",
"항상 오른쪽 노드가 큰 값임 no = pps['Next'] return no def delete(self, del_no):",
"self.verbose: open('bbd.dmp', 'wb').write(self.bbd) print kavutil.vprint('BBD') print kavutil.HexDump().Buffer(self.bbd, 0, 0x80) # Root 읽기 root_startblock",
"변수 선언 vlist.append('Exploit.OLE.CVE-2012-0158') # 진단/치료하는 악성코드 이름 등록 vlist.append('Exploit.OLE.CVE-2003-0820') vlist.append('Exploit.OLE.CVE-2003-0347') vlist.sort() return vlist",
"bbd[(no+1)*4:] no = t_link[-1] bbd = bbd[:no * 4] + '\\xfe\\xff\\xff\\xff' + bbd[(no"
"canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight)) cv2.moveWindow(\"contours\",screensize[0][0]+(windowwidth+headerside)*2,0)",
"pydicom.filereader import dcmread, read_dicomdir from glob import glob import cv2 import numpy as",
"glob import glob import cv2 import numpy as np cv2.destroyAllWindows() # window prop",
"((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside",
"= cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)",
"image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process images # original",
"windowwidth = int((screenwidth - n * headerside*2)/ n) windowheight = int((screenheight - m",
"= screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside = 8 n = 3",
"screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside = 8 n",
"= [cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]],",
"headerbottom = 8 headerside = 8 n = 3 m = 2 windowwidth",
"hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True)",
"import pydicom import os from pydicom.filereader import dcmread, read_dicomdir from glob import glob",
"cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL)",
"= ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8",
"= ds_list[10].pixel_array # image details image_height, image_width = image.shape # image pre-processing image_norm",
"= glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps] # select image image",
"get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter))",
"contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours =",
"# image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that",
"norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000",
"n) windowheight = int((screenheight - m * (headertop + headerbottom)) /m) # input",
"# import pydicom import os from pydicom.filereader import dcmread, read_dicomdir from glob import",
"glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps] # select image image =",
"perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(),",
"# original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny)",
"n = 3 m = 2 windowwidth = int((screenwidth - n * headerside*2)/",
"image_width = image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX)",
"beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh =",
"min_head_thresh = 10000 max_head_thresh = 65535 # get outline of head ret, image_thresh",
"plt # import pydicom import os from pydicom.filereader import dcmread, read_dicomdir from glob",
"image_height, image_width = image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536,",
"# get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max =",
"from glob import glob import cv2 import numpy as np cv2.destroyAllWindows() # window",
"screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside = 8 n =",
"= int((screenwidth - n * headerside*2)/ n) windowheight = int((screenheight - m *",
"get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh)",
"im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter",
"m * (headertop + headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps",
"= 8 n = 3 m = 2 windowwidth = int((screenwidth - n",
"image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can",
"original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight))",
"cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8 =",
"n * headerside*2)/ n) windowheight = int((screenheight - m * (headertop + headerbottom))",
"- m * (headertop + headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\"",
"ds_list = [dcmread(filename) for filename in fps] # select image image = ds_list[10].pixel_array",
"image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 # get outline of",
"image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan =",
"# get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get",
"# get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 =",
"cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight)) cv2.moveWindow(\"contours\",screensize[0][0]+(windowwidth+headerside)*2,0) # cv2.waitKey(1) # cv2.destroyAllWindows()",
"headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list =",
"(0,65535,0), 3) # display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\",",
"cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy",
"window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30",
"os from pydicom.filereader import dcmread, read_dicomdir from glob import glob import cv2 import",
"8 headerside = 8 n = 3 m = 2 windowwidth = int((screenwidth",
"dcmread, read_dicomdir from glob import glob import cv2 import numpy as np cv2.destroyAllWindows()",
"= 10000 max_head_thresh = 65535 # get outline of head ret, image_thresh =",
"image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight)) cv2.moveWindow(\"contours\",screensize[0][0]+(windowwidth+headerside)*2,0) # cv2.waitKey(1) #",
"import matplotlib.pyplot as plt # import pydicom import os from pydicom.filereader import dcmread,",
"(headertop + headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\"))",
"= [dcmread(filename) for filename in fps] # select image image = ds_list[10].pixel_array #",
"= cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8",
"display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny",
"[dcmread(filename) for filename in fps] # select image image = ds_list[10].pixel_array # image",
"screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside = 8 n = 3 m",
"screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom =",
"pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see",
"= r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps] #",
"65535 # get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8",
"import numpy as np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth =",
"ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) #",
"= screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside = 8",
"numpy as np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0]",
"= 3 m = 2 windowwidth = int((screenwidth - n * headerside*2)/ n)",
"cv2 import numpy as np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth",
"/m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename)",
"np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process images #",
"= np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours]",
"r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps] # select",
"can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 #",
"max_head_thresh = 65535 # get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh,",
"# canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight))",
"3) # display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm)",
"process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL)",
"cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours",
"dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm)",
"so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh =",
"= 65535 # get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO)",
"screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom = 8 headerside =",
"= cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1)",
"3 m = 2 windowwidth = int((screenwidth - n * headerside*2)/ n) windowheight",
"for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0),",
"import os from pydicom.filereader import dcmread, read_dicomdir from glob import glob import cv2",
"= cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True) for",
"as plt # import pydicom import os from pydicom.filereader import dcmread, read_dicomdir from",
"fps] # select image image = ds_list[10].pixel_array # image details image_height, image_width =",
"contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter =",
"0, (0,65535,0), 3) # display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight))",
"headertop= 30 headerbottom = 8 headerside = 8 n = 3 m =",
"= cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 # get outline of head",
"outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny",
"windowheight = int((screenheight - m * (headertop + headerbottom)) /m) # input directory",
"image = ds_list[10].pixel_array # image details image_height, image_width = image.shape # image pre-processing",
"[contours[idx_max]], 0, (0,65535,0), 3) # display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0)",
"in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) #",
"ds_list[10].pixel_array # image details image_height, image_width = image.shape # image pre-processing image_norm =",
"= 2 windowwidth = int((screenwidth - n * headerside*2)/ n) windowheight = int((screenheight",
"= cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour",
"idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process",
"headerside*2)/ n) windowheight = int((screenheight - m * (headertop + headerbottom)) /m) #",
"input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename",
"2 windowwidth = int((screenwidth - n * headerside*2)/ n) windowheight = int((screenheight -",
"contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour",
"image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0)",
"cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0]",
"# image details image_height, image_width = image.shape # image pre-processing image_norm = cv2.normalize(image,",
"# window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop=",
"fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps] # select image",
"pydicom import os from pydicom.filereader import dcmread, read_dicomdir from glob import glob import",
"* (headertop + headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps =",
"cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\",",
"[cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0,",
"image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours)",
"cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3)",
"m = 2 windowwidth = int((screenwidth - n * headerside*2)/ n) windowheight =",
"+ headerbottom)) /m) # input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list",
"largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max = np.argmax(np.array(perimeter)) image_contours",
"in fps] # select image image = ds_list[10].pixel_array # image details image_height, image_width",
"cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan",
"dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in fps]",
"max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours,",
"select image image = ds_list[10].pixel_array # image details image_height, image_width = image.shape #",
"import cv2 import numpy as np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900))",
"better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 # get outline",
"10000 max_head_thresh = 65535 # get outline of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh,",
"get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest",
"head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150)",
"= image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) #",
"prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight = screensize[1][1]-screensize[1][0] headertop= 30 headerbottom",
"= np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process images",
"cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2,",
"cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) #",
"for filename in fps] # select image image = ds_list[10].pixel_array # image details",
"int((screenheight - m * (headertop + headerbottom)) /m) # input directory dicom_dir =",
"cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL)",
"cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy = cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) #",
"of head ret, image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny =",
"np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in contours] idx_max",
"read_dicomdir from glob import glob import cv2 import numpy as np cv2.destroyAllWindows() #",
"= int((screenheight - m * (headertop + headerbottom)) /m) # input directory dicom_dir",
"filename in fps] # select image image = ds_list[10].pixel_array # image details image_height,",
"# select image image = ds_list[10].pixel_array # image details image_height, image_width = image.shape",
"see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 # get",
"import dcmread, read_dicomdir from glob import glob import cv2 import numpy as np",
"as np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight",
"cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight)) cv2.moveWindow(\"contours\",screensize[0][0]+(windowwidth+headerside)*2,0) #",
"cv2.imshow(\"image_canny\", image_canny) cv2.resizeWindow(\"image_canny\",(windowwidth,windowheight)) cv2.moveWindow(\"image_canny\",screensize[0][0]+(windowwidth+headerside*2),0) # contours cv2.namedWindow(\"contours\",cv2.WINDOW_NORMAL) cv2.imshow(\"contours\", image_contours) cv2.resizeWindow(\"contours\",(windowwidth,windowheight)) cv2.moveWindow(\"contours\",screensize[0][0]+(windowwidth+headerside)*2,0) # cv2.waitKey(1)",
"image details image_height, image_width = image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None,",
"* headerside*2)/ n) windowheight = int((screenheight - m * (headertop + headerbottom)) /m)",
"int((screenwidth - n * headerside*2)/ n) windowheight = int((screenheight - m * (headertop",
"details image_height, image_width = image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0,",
"# so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh",
"images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) # canny cv2.namedWindow(\"image_canny\",cv2.WINDOW_NORMAL) cv2.imshow(\"image_canny\",",
"glob import cv2 import numpy as np cv2.destroyAllWindows() # window prop screensize =",
"- n * headerside*2)/ n) windowheight = int((screenheight - m * (headertop +",
"cv2.findContours(image_canny,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE) image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt",
"import glob import cv2 import numpy as np cv2.destroyAllWindows() # window prop screensize",
"image_norm_3chan = np.stack([image_norm]*3,axis=-1) # get largest contour perimeter = [cv2.arcLength(cnt,True) for cnt in",
"alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh",
"image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so that can see better",
"contours] idx_max = np.argmax(np.array(perimeter)) image_contours = cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display",
"cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535 # get outline of head ret,",
"# display process images # original image cv2.namedWindow(\"image_norm\",cv2.WINDOW_NORMAL) cv2.moveWindow(\"image_norm\",screensize[0][0],0) cv2.resizeWindow(\"image_norm\",(windowwidth,windowheight)) cv2.imshow(\"image_norm\", image_norm) #",
"8 n = 3 m = 2 windowwidth = int((screenwidth - n *",
"directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for filename in",
"image.shape # image pre-processing image_norm = cv2.normalize(image, dst=None, alpha=0, beta=65536, norm_type=cv2.NORM_MINMAX) # so",
"headerside = 8 n = 3 m = 2 windowwidth = int((screenwidth -",
"that can see better image_norm_uint8 = cv2.convertScaleAbs(image_norm) min_head_thresh = 10000 max_head_thresh = 65535",
"= 8 headerside = 8 n = 3 m = 2 windowwidth =",
"# input directory dicom_dir = r\"E:\\BTSynchSGH\\datasets\\necklysis\\input\\dicom\" fps = glob(os.path.join(dicom_dir,\"*.dcm\")) ds_list = [dcmread(filename) for",
"image image = ds_list[10].pixel_array # image details image_height, image_width = image.shape # image",
"= cv2.drawContours(image_norm_3chan.copy(), [contours[idx_max]], 0, (0,65535,0), 3) # display process images # original image",
"image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get contour im2, contours, hierarchy =",
"matplotlib.pyplot as plt # import pydicom import os from pydicom.filereader import dcmread, read_dicomdir",
"30 headerbottom = 8 headerside = 8 n = 3 m = 2",
"from pydicom.filereader import dcmread, read_dicomdir from glob import glob import cv2 import numpy",
"np cv2.destroyAllWindows() # window prop screensize = ((-1440,0),(0,900)) screenwidth = screensize[0][1]-screensize[0][0] screenheight =",
"image_thresh = cv2.threshold(image_norm,min_head_thresh, max_head_thresh, cv2.THRESH_TOZERO) image_thresh_uint8 = cv2.convertScaleAbs(image_thresh) image_canny = cv2.Canny(image_thresh_uint8,100,150) # get"
"as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for",
"import socket import threading import multiprocessing import os def worker(sock): while True: conn,",
"while True: data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__ ==",
"print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN)",
"data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\",",
"def process_request(conn, addr): print(\"addr: \", addr) with conn: while True: data = conn.recv(1024)",
"if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT",
"worker(sock): while True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn,",
"= 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process in",
"while True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr))",
"\", addr) with conn: while True: data = conn.recv(1024) if not data: break",
"sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _",
"True: data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\":",
"os def worker(sock): while True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread =",
"True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start()",
"sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for",
"= [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process in process_list: process.start() for",
"process_request(conn, addr): print(\"addr: \", addr) with conn: while True: data = conn.recv(1024) if",
"with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list =",
"import os def worker(sock): while True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread",
"= threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr) with conn:",
"= conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET,",
"thread.start() def process_request(conn, addr): print(\"addr: \", addr) with conn: while True: data =",
"threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr) with conn: while",
"\"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list",
"args=(sock,)) for _ in range(PROCESS_COUNT)] for process in process_list: process.start() for process in",
"args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr) with conn: while True:",
"import threading import multiprocessing import os def worker(sock): while True: conn, addr =",
"multiprocessing import os def worker(sock): while True: conn, addr = sock.accept() print(\"PID:\", os.getpid())",
"conn: while True: data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__",
"6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process in process_list:",
"sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in",
"for _ in range(PROCESS_COUNT)] for process in process_list: process.start() for process in process_list:",
"socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,))",
"def worker(sock): while True: conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request,",
"process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process in process_list: process.start()",
"not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:",
"conn, addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def",
"10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)]",
"PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process",
"[multiprocessing.Process(target=worker, args=(sock,)) for _ in range(PROCESS_COUNT)] for process in process_list: process.start() for process",
"addr = sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn,",
"print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \",",
"addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr) with conn: while True: data",
"break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001))",
"import multiprocessing import os def worker(sock): while True: conn, addr = sock.accept() print(\"PID:\",",
"threading import multiprocessing import os def worker(sock): while True: conn, addr = sock.accept()",
"__name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT =",
"socket import threading import multiprocessing import os def worker(sock): while True: conn, addr",
"thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr) with",
"= sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr):",
"os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr: \", addr)",
"socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6 process_list = [multiprocessing.Process(target=worker,",
"== \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: sock.bind((\"127.0.0.1\", 10001)) sock.listen(socket.SOMAXCONN) PROCESS_COUNT = 6",
"addr): print(\"addr: \", addr) with conn: while True: data = conn.recv(1024) if not",
"_ in range(PROCESS_COUNT)] for process in process_list: process.start() for process in process_list: process.join()",
"addr) with conn: while True: data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\"))",
"data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with",
"if not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as",
"with conn: while True: data = conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if",
"print(\"addr: \", addr) with conn: while True: data = conn.recv(1024) if not data:",
"sock.accept() print(\"PID:\", os.getpid()) thread = threading.Thread(target=process_request, args=(conn, addr)) thread.start() def process_request(conn, addr): print(\"addr:",
"conn.recv(1024) if not data: break print(data.decode(\"utf-8\")) if __name__ == \"__main__\": with socket.socket(socket.AF_INET, socket.SOCK_STREAM)"
] |
[
"0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\",
"= plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels,",
"urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for",
"import glob import matplotlib.pyplot as plt import matplotlib.cm as cm import numpy as",
"mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files",
"map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files: print('Plotting ', f) ds",
"xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth,",
"map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians))",
"map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files: print('Plotting",
"f in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat",
"37. parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1 +",
"-121.7 lat0, lat1 = 36.6, 37. parallels = np.arange(lat0, lat1 + 0.1, 0.1)",
"= np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds",
"depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712') plt.savefig('monterey_bay_multibeam_bathymetry.png', dpi=300)",
"matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1",
"matplotlib.cm as cm import numpy as np import xarray as xr from mpl_toolkits.basemap",
"plt import matplotlib.cm as cm import numpy as np import xarray as xr",
"Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path",
"gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc')",
"+ 0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8,",
"llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4",
"import matplotlib.pyplot as plt import matplotlib.cm as cm import numpy as np import",
"= xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat,",
"= Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip",
"36.6, 37. parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1",
"multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1",
"import xarray as xr from mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font',",
"import numpy as np import xarray as xr from mpl_toolkits.basemap import Basemap import",
"as cm import numpy as np import xarray as xr from mpl_toolkits.basemap import",
"-122.2, -121.7 lat0, lat1 = 36.6, 37. parallels = np.arange(lat0, lat1 + 0.1,",
"plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels))",
"lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig =",
"ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon,",
"in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat =",
"xarray as xr from mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font', size=12)",
"import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort()",
"np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat,",
"vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry",
"= np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon,",
"lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0,",
"matplotlib.pyplot as plt import matplotlib.cm as cm import numpy as np import xarray",
"np import xarray as xr from mpl_toolkits.basemap import Basemap import gc import matplotlib",
"as plt import matplotlib.cm as cm import numpy as np import xarray as",
"', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth =",
"meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map =",
"fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines()",
"np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r)",
"+ 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1,",
"\\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in",
"4 for f in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon =",
"ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712') plt.savefig('monterey_bay_multibeam_bathymetry.png', dpi=300) plt.close(fig)",
"multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip])",
"'/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1 = 36.6, 37. parallels",
"skip = 4 for f in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f)",
"multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1 = 36.6, 37. parallels =",
"labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files: print('Plotting ', f)",
"parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1,",
"= 36.6, 37. parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0,",
"size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 =",
"glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1 = 36.6,",
"lat0, lat1 = 36.6, 37. parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians",
"lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del",
"'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0,",
"lat1 = 36.6, 37. parallels = np.arange(lat0, lat1 + 0.1, 0.1) meridians =",
"import matplotlib.cm as cm import numpy as np import xarray as xr from",
"depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth,",
"from mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf'",
"0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6))",
"urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f",
"cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard",
"xr from mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path =",
"map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files: print('Plotting ',",
"Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip =",
"resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians, labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files:",
"6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f') map.drawcoastlines() map.drawparallels(parallels, labels=~np.isnan(parallels)) map.drawmeridians(meridians,",
"np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig",
"print('Plotting ', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth",
"glob import matplotlib.pyplot as plt import matplotlib.cm as cm import numpy as np",
"import Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files =",
"np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect()",
"as xr from mpl_toolkits.basemap import Basemap import gc import matplotlib matplotlib.rc('font', size=12) data_path",
"= np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip]) plt.pcolor(lon, lat, depth, vmin=0, vmax=100,",
"depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay",
"+ '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1 = 36.6, 37.",
"plt.pcolor(lon, lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar()",
"= 4 for f in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon",
"data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2,",
"matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0,",
"as np import xarray as xr from mpl_toolkits.basemap import Basemap import gc import",
"= np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0,",
"f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip]) lat = np.array(ds.latitude[::skip,::skip]) depth = np.array(ds.depth[::skip,::skip])",
"import gc import matplotlib matplotlib.rc('font', size=12) data_path = 'processed_netcdf' multibeam_files = glob.glob(data_path +",
"labels=~np.isnan(meridians)) skip = 4 for f in multibeam_files: print('Plotting ', f) ds =",
"numpy as np import xarray as xr from mpl_toolkits.basemap import Basemap import gc",
"lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1,",
"= glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7 lat0, lat1 =",
"vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from",
"0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map",
"np.arange(lon0, lon1 + 0.1, 0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0,",
"lat, depth, vmin=0, vmax=100, cmap=cm.viridis_r) del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey",
"del lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam",
"= 'processed_netcdf' multibeam_files = glob.glob(data_path + '/*.nc') multibeam_files.sort() lon0, lon1 = -122.2, -121.7",
"0.1) fig = plt.figure(figsize=(8, 6)) map = Basemap(llcrnrlon=lon0, llcrnrlat=lat0, urcrnrlon=lon1, urcrnrlat=lat1, \\ resolution='f')",
"for f in multibeam_files: print('Plotting ', f) ds = xr.open_dataset(f) lon = np.array(ds.longitude[::skip,::skip])",
"lon1 = -122.2, -121.7 lat0, lat1 = 36.6, 37. parallels = np.arange(lat0, lat1",
"lon0, lon1 = -122.2, -121.7 lat0, lat1 = 36.6, 37. parallels = np.arange(lat0,",
"= np.arange(lat0, lat1 + 0.1, 0.1) meridians = np.arange(lon0, lon1 + 0.1, 0.1)",
"cm import numpy as np import xarray as xr from mpl_toolkits.basemap import Basemap",
"= -122.2, -121.7 lat0, lat1 = 36.6, 37. parallels = np.arange(lat0, lat1 +",
"lon, lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712')",
"lat, depth, ds gc.collect() plt.colorbar() fig.suptitle('Monterey Bay bathymetry from shipboard Multibeam EM-712') plt.savefig('monterey_bay_multibeam_bathymetry.png',"
] |
[
"+ '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc",
"new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' %",
"= [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels)",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args,",
"interface.\"\"\" from ego.component import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list =",
"% subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc =",
"[] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs,",
"for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s'",
"Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component):",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return",
"labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' %",
"graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature,",
"def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def",
"zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_' +",
"#!/usr/bin/env python \"\"\"Provides scikit interface.\"\"\" from ego.component import GraphComponent from collections import Counter",
"signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list)",
"return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args,",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list",
"python \"\"\"Provides scikit interface.\"\"\" from ego.component import GraphComponent from collections import Counter def",
"preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature =",
"new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph,",
"graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label",
"= [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds):",
"decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):",
"+ '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return",
"subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent(",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list =",
"v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v)",
"return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature",
"return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return",
"= signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list,",
"GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = []",
"= '%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"'_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc",
"[] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in",
"decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph,",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list =",
"k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return",
"% pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def",
"new_subgraphs_list = [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred",
"def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs,",
"**kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs)",
"in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature",
"signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature =",
"[subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' %",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = []",
"signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph,",
"in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_'",
"labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' %",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list",
"'_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args,",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list =",
"def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs,",
"v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for",
"= signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = []",
"subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s' %",
"new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph,",
"new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels",
"= [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s'",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = []",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for",
"decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):",
"graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d'",
"for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()]",
"new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for",
"[] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature",
"+ '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return",
"u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc",
"subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature",
"subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent(",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs):",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def",
"decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):",
"return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = []",
"for v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"= signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list,",
"(k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph,",
"dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent(",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = []",
"for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list)",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list =",
"return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return",
"new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v",
"scikit interface.\"\"\" from ego.component import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list",
"decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args,",
"return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return",
"[] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label']",
"[v for v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' %",
"new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph,",
"+ '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return",
"% (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent(",
"= [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels =",
"from ego.component import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = []",
"def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs,",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list =",
"**kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def rlbsiz(*args, **kargs): return decompose_relabel_node_size(*args, **kargs)",
"v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list,",
"graph_component.signatures, preds): new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc =",
"**kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs)",
"= [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label =",
"return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature",
"def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs,",
"gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list =",
"signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list)",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list",
"signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s' % pred",
"signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph,",
"graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_'",
"new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v",
"[] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d'",
"decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args,",
"signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph,",
"decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def rlbsiz(*args, **kargs): return decompose_relabel_node_size(*args,",
"'_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc =",
"= signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list,",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list",
"zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature +",
"'_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc",
"dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k,",
"+ '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph)",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for",
"in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph,",
"u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v)",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = []",
"gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in",
"= [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in",
"= [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels))",
"% len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def",
"signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()])",
"return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds =",
"**kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs):",
"**kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def rlbsiz(*args, **kargs):",
"v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc",
"signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()])",
"rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args,",
"for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()]",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list",
"ego.component import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list",
"pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph)",
"max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component,",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list",
"return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature",
"= signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in",
"def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def",
"<gh_stars>0 #!/usr/bin/env python \"\"\"Provides scikit interface.\"\"\" from ego.component import GraphComponent from collections import",
"'%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc",
"for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature =",
"[] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in",
"zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature +",
"= signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list,",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = []",
"gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs)",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = []",
"import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature",
"for u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k,",
"gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in",
"in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for",
"len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component):",
"def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def",
"[v for v in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph)",
"decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = [] new_signatures_list",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args,",
"= [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s'",
"Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in",
"[subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph)",
"new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures,",
"+ '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return",
"from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for",
"signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list)",
"new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for",
"= [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u",
"decompose_relabel_distinct_node_labels(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures):",
"for v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' + '_'.join(['%s:%s' % (k,",
"def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list =",
"gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs)",
"gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list = []",
"gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in",
"import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list =",
"node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature + '_node_size_' + node_size_label new_subgraphs_list.append(subgraph)",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs)",
"rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args,",
"graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_'",
"new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph,",
"in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def",
"return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def rlbsiz(*args, **kargs): return",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list =",
"def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs,",
"signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return",
"in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for",
"signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds",
"'_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature =",
"subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature",
"new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_estimator(graph_component, graph_estimator=None):",
"**kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs):",
"graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature +",
"collections import Counter def decompose_relabel_node_size(graph_component): new_subgraphs_list = [] new_signatures_list = [] for subgraph,",
"= GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list",
"node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component):",
"= [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v",
"preds): new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent(",
"% max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def",
"in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc =",
"'_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc",
"+ '_node_degree_frequency_' + '_'.join(['%s:%s' % (k, v) for k, v in Counter(labels).most_common()]) new_subgraphs_list.append(subgraph)",
"[] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): labels = [v",
"graph_component.signatures): labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d'",
"= [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes()",
"subgraph.nodes()] new_signature = signature + '_node_label_frequency_' + '_'.join(['%s:%s' % (k, v) for k,",
"Counter(labels).most_common()]) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_distinct_node_labels(graph_component):",
"new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_degree_frequency(graph_component): new_subgraphs_list =",
"GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list =",
"**kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs): return decompose_relabel_max_node_degree(*args, **kargs) def rlbdfrq(*args, **kargs):",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args, **kargs): return decompose_relabel_estimator(*args, **kargs) def rlbmdgr(*args, **kargs):",
"subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature",
"zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc",
"subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_node_label_frequency(graph_component): new_subgraphs_list = [] new_signatures_list = [] for",
"signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature = signature +",
"pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def rlbest(*args,",
"labels = [subgraph.nodes[u]['label'] for u in subgraph.nodes()] new_signature = signature + '_node_label_frequency_' +",
"**kargs) def rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs)",
"labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature + '_node_degree_frequency_' +",
"[] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label = '%d' % subgraph.number_of_nodes() new_signature",
"in zip(graph_component.subgraphs, graph_component.signatures): labels = [v for v in dict(subgraph.degree()).values()] new_signature = signature",
"rlbdfrq(*args, **kargs): return decompose_relabel_node_degree_frequency(*args, **kargs) def rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args,",
"in dict(subgraph.degree()).values()] new_signature = signature + '_max_node_degree_%d' % max(labels) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc =",
"signature + '_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list)",
"for u in subgraph.nodes()] new_signature = signature + '_distinct_node_labels_%d' % len(set(labels)) new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature)",
"= graph_estimator.predict(graph_component.subgraphs) for subgraph, signature, pred in zip(graph_component.subgraphs, graph_component.signatures, preds): new_signature = signature",
"new_subgraphs_list = [] new_signatures_list = [] for subgraph, signature in zip(graph_component.subgraphs, graph_component.signatures): node_size_label",
"'_estimator_%s' % pred new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc",
"rlbnod(*args, **kargs): return decompose_relabel_distinct_node_labels(*args, **kargs) def rlblfrq(*args, **kargs): return decompose_relabel_node_label_frequency(*args, **kargs) def rlbsiz(*args,",
"+ node_size_label new_subgraphs_list.append(subgraph) new_signatures_list.append(new_signature) gc = GraphComponent( graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def",
"graph=graph_component.graph, subgraphs=new_subgraphs_list, signatures=new_signatures_list) return gc def decompose_relabel_max_node_degree(graph_component): new_subgraphs_list = [] new_signatures_list = []",
"\"\"\"Provides scikit interface.\"\"\" from ego.component import GraphComponent from collections import Counter def decompose_relabel_node_size(graph_component):",
"def decompose_relabel_estimator(graph_component, graph_estimator=None): new_subgraphs_list = [] new_signatures_list = [] preds = graph_estimator.predict(graph_component.subgraphs) for"
] |
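Each decomposer above leaves the subgraphs untouched and only rewrites their signatures, so the relabelings compose freely with other decompositions. A minimal usage sketch, assuming ego is installed, that GraphComponent accepts the graph/subgraphs/signatures keywords used above, and a toy networkx graph with illustrative 'label' attributes:

import networkx as nx
from ego.component import GraphComponent

# three-node path with two distinct labels, used purely for illustration
g = nx.path_graph(3)
nx.set_node_attributes(g, {0: 'A', 1: 'B', 2: 'A'}, 'label')
gc = GraphComponent(graph=g, subgraphs=[g], signatures=['path'])
out = decompose_relabel_distinct_node_labels(gc)
print(out.signatures)  # ['path_distinct_node_labels_2']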
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
#
import math
import numpy as np


def quadratic(**kwargs) -> float:
    return sum(x_i ** 2 for _, x_i in kwargs.items())


def ackley(x_1=None, x_2=None, a=20, b=0.2, c=2 * math.pi):
    d = 2
    # In the standard Ackley function both exponents are averaged over the
    # dimension d; dividing the cosine sum by d makes ackley(0, 0) == 0.
    return (-a * np.exp(-b * np.sqrt((x_1**2 + x_2**2) / d))
            - np.exp((np.cos(c * x_1) + np.cos(c * x_2)) / d)
            + a + np.exp(1))


def flower(**kwargs):
    a = 1
    b = 2
    c = 4
    x_1 = kwargs['x_1']
    x_2 = kwargs['x_2']
    x_norm = np.sqrt(x_1**2 + x_2**2)
    return a * x_norm + b * np.sin(c * np.arctan2(x_1, x_2))
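A quick sanity check follows directly from the formulas: quadratic and ackley (with the cosine term averaged over d as above) both attain their global minimum of 0 at the origin, and on the positive x-axis flower reduces to its radial term because sin(4 * atan2(1, 0)) = sin(2*pi) = 0:

print(quadratic(x_1=0.0, x_2=0.0))            # 0.0
print(ackley(x_1=0.0, x_2=0.0))               # 0.0: -a - e + a + e cancels exactly
print(round(flower(x_1=1.0, x_2=0.0), 12))    # 1.0: norm is 1, sine term vanishes up to float error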
from flask import Flask, url_for, render_template, request, redirect
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
from werkzeug.utils import secure_filename
from werkzeug.serving import run_simple
from id_class_locator import id_class_detector
import os
import time
import cv2  # "from cv2 import cv2" is a legacy opencv-python stub workaround

app = Flask(__name__)
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///my.db'
# db = SQLAlchemy(app)
path2File = os.path.dirname(os.path.realpath(__file__))
pathToModel = path2File + '/WorkArea/FRCNN'
PATH = path2File + '/static/input'
ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif'])
app.config['PATH'] = PATH
# app.config["TEMPLATES_AUTO_RELOAD"] = True

# Load the Faster R-CNN graph once at startup so every request reuses it.
model = cv2.dnn.readNetFromTensorflow(pathToModel + '/frozen_inference_graph.pb',
                                      pathToModel + '/frcnn.pbtxt')


@app.route('/hello', methods=['POST', 'GET'])
def hello():
    return 'Hello'


@app.route('/', methods=['POST', 'GET'])
def index():
    return render_template('home.html')


@app.route('/upload', methods=['POST', 'GET'])
def upload():
    if request.method == 'POST':
        # check if the post request has the file part
        if 'imageUploadForm' not in request.files:
            return redirect(request.url)
        file = request.files['imageUploadForm']
        filename = secure_filename(file.filename)
        file.save(os.path.join(app.config['PATH'], filename))
        print(filename)
        img = cv2.imread(os.path.join(app.config['PATH'], filename))
        id_class_detector(img, model, filename, debug=False)
        # time.sleep(2)
        return render_template('home.html', value=filename)
    return render_template('home.html')


if __name__ == "__main__":
    run_simple('127.0.0.1', 9100, app, use_reloader=False)
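To drive the /upload route end to end, a client only needs to post a multipart form whose field name matches the imageUploadForm key the handler reads. A hedged sketch, assuming the server above is running on 127.0.0.1:9100, that a local test.jpg exists, and that the requests library is available (it is not used by the app itself):

import requests

with open('test.jpg', 'rb') as f:
    resp = requests.post('http://127.0.0.1:9100/upload',
                         files={'imageUploadForm': f})
print(resp.status_code)  # 200 when the upload and detection both succeed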
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from configparser import ConfigParser
from s3transfer import RetriesExceededError
from s3transfer.exceptions import TransferNotDoneError
import os
import sys

S3 = "s3://"
S3A = "s3a://"


class ConfigReader:
    def __init__(self, logger, config_location):
        self.config_location = config_location
        self.logger = logger

    def read_config(self):
        config = ConfigParser()
        config.read(self.config_location)
        # Both flags default to 'True' and can be overridden from the config.
        enable_insert_overwrite = 'True'
        enable_external_table_drop = 'True'
        if 'aws' in config and 'region' in config['aws']:
            aws_region = config['aws']['region']
        else:
            self.logger.error("Not able to read the region from the config")
            sys.exit(os.EX_CONFIG)
        if 'athena' in config:
            if 'ATHENA_OUTPUT_LOCATION' in config['athena']:
                athena_output_location = config['athena']['ATHENA_OUTPUT_LOCATION']
            else:
                self.logger.error("Not able to read the ATHENA_OUTPUT_LOCATION from the config")
                sys.exit(os.EX_CONFIG)
            if 'STAGING_DB' in config['athena']:
                staging_db = config['athena']['STAGING_DB']
            else:
                self.logger.error("Not able to read the STAGING_DB from the config")
                sys.exit(os.EX_CONFIG)
            if 'ENABLE_INSERT_OVERWRITE' in config['athena']:
                enable_insert_overwrite = config['athena']['ENABLE_INSERT_OVERWRITE']
            if 'ENABLE_EXTERNAL_TABLE_DROP' in config['athena']:
                # honor the table-drop flag from its own key instead of
                # repeating ENABLE_INSERT_OVERWRITE
                enable_external_table_drop = config['athena']['ENABLE_EXTERNAL_TABLE_DROP']
        else:
            self.logger.error("Not able to read the athena config")
            sys.exit(os.EX_CONFIG)
        return aws_region, athena_output_location, staging_db, enable_insert_overwrite, enable_external_table_drop


class FileReader:
    def __init__(self, logger, s3_resource):
        self.logger = logger
        self.s3_resource = s3_resource

    def split_s3_path(self, s3_location):
        path_parts = s3_location.replace(S3, "").replace(S3A, "").split("/")
        s3_bucket = path_parts.pop(0)
        prefix = "/".join(path_parts)
        return s3_bucket, prefix

    def download_input_from_s3(self, s3_bucket, prefix, destination_location):
        try:
            self.s3_resource.meta.client.download_file(s3_bucket, prefix, destination_location)
        except RetriesExceededError as e:
            self.logger.fatal("Unable to download the file {0}".format(e))
            self.logger.fatal("Unable to download the file from s3 to local : {0}/{1}".format(s3_bucket, prefix))
            sys.exit(os.EX_DATAERR)
        except TransferNotDoneError as e:
            self.logger.fatal("Unable to download the file {0}".format(e))
            sys.exit(os.EX_OSERR)
        return destination_location

    def get_file(self, file_type, source_location, destination_location):
        if source_location.startswith(S3) or source_location.startswith(S3A):
            self.logger.info("Downloading the {0} from {1} to {2}".format(file_type, source_location, destination_location))
            s3_bucket, prefix = self.split_s3_path(source_location)
            return self.download_input_from_s3(s3_bucket, prefix, destination_location)
        else:
            return source_location
"as e: self.logger.fatal(\"Unable to download the file {0}\".format(e)) self.logger.fatal(\"Unable to download the file",
"\") sys.exit(os.EX_CONFIG) if 'ENABLE_INSERT_OVERWRITE' in config['athena']: enable_insert_overwrite = config['athena']['ENABLE_INSERT_OVERWRITE'] if 'ENABLE_EXTERNAL_TABLE_DROP' in config['athena']:",
"'ENABLE_EXTERNAL_TABLE_DROP' in config['athena']: enable_external_table_drop = config['athena']['ENABLE_INSERT_OVERWRITE'] else: self.logger.error(\"Not able to read the athena",
"able to read the region from the config \") sys.exit(os.EX_CONFIG) if 'athena' in",
"{0}/{1}\".format(s3_bucket, prefix)) sys.exit(os.EX_DATAERR) except TransferNotDoneError as e: self.logger.fatal(\"Unable to download the file {0}\".format(e))",
"if source_location.startswith(S3) or source_location.startswith(S3A): self.logger.info(\"Downloading the {0} from {1} to {2}\".format(file_type, source_location, destination_location))",
"law or agreed to in writing, # software distributed under the License is",
"# software distributed under the License is distributed on an # \"AS IS\"",
"to you under the Apache License, Version 2.0 (the # \"License\"); you may",
"the STAGING_DB from the config \") sys.exit(os.EX_CONFIG) if 'ENABLE_INSERT_OVERWRITE' in config['athena']: enable_insert_overwrite =",
"from the config \") sys.exit(os.EX_CONFIG) if 'STAGING_DB' in config['athena']: staging_db = config['athena']['STAGING_DB'] else:",
"= ConfigParser() config.read(self.config_location) enable_insert_overwrite = 'True' enable_external_table_drop = 'True' if 'aws' in config",
"file # distributed with this work for additional information # regarding copyright ownership.",
"able to read the STAGING_DB from the config \") sys.exit(os.EX_CONFIG) if 'ENABLE_INSERT_OVERWRITE' in",
"# Licensed to the Apache Software Foundation (ASF) under one # or more",
"'True' enable_external_table_drop = 'True' if 'aws' in config and 'region' in config['aws']: aws_region",
"language governing permissions and limitations # under the License. from configparser import ConfigParser",
"sys.exit(os.EX_OSERR) return destination_location def get_file(self,file_type, source_location, destination_location): if source_location.startswith(S3) or source_location.startswith(S3A): self.logger.info(\"Downloading the",
"copyright ownership. The ASF licenses this file # to you under the Apache",
"ownership. The ASF licenses this file # to you under the Apache License,",
"License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or",
"ConfigReader: def __init__(self, logger, config_location): self.config_location = config_location self.logger = logger def read_config(self):",
"config: if 'ATHENA_OUTPUT_LOCATION' in config['athena']: athena_output_location = config['athena']['ATHENA_OUTPUT_LOCATION'] else: self.logger.error(\"Not able to read",
"s3_resource): self.logger = logger self.s3_resource = s3_resource def split_s3_path(self, s3_location): path_parts = s3_location.replace(S3,",
"config and 'region' in config['aws']: aws_region = config['aws']['region'] else: self.logger.error(\"Not able to read",
"# Unless required by applicable law or agreed to in writing, # software",
"at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed",
"= logger self.s3_resource = s3_resource def split_s3_path(self, s3_location): path_parts = s3_location.replace(S3, \"\").replace(S3A, \"\").split(\"/\")",
"logger, s3_resource): self.logger = logger self.s3_resource = s3_resource def split_s3_path(self, s3_location): path_parts =",
"to in writing, # software distributed under the License is distributed on an",
"agreed to in writing, # software distributed under the License is distributed on",
"\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express",
"config \") sys.exit(os.EX_CONFIG) if 'STAGING_DB' in config['athena']: staging_db = config['athena']['STAGING_DB'] else: self.logger.error(\"Not able",
"self.config_location = config_location self.logger = logger def read_config(self): config = ConfigParser() config.read(self.config_location) enable_insert_overwrite",
"s3_bucket, prefix, destination_location): try: self.s3_resource.meta.client.download_file(s3_bucket, prefix, destination_location) except RetriesExceededError as e: self.logger.fatal(\"Unable to",
"a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required",
"sys.exit(os.EX_CONFIG) if 'ENABLE_INSERT_OVERWRITE' in config['athena']: enable_insert_overwrite = config['athena']['ENABLE_INSERT_OVERWRITE'] if 'ENABLE_EXTERNAL_TABLE_DROP' in config['athena']: enable_external_table_drop",
"# under the License. from configparser import ConfigParser from s3transfer import RetriesExceededError from",
"to the Apache Software Foundation (ASF) under one # or more contributor license",
"return aws_region, athena_output_location, staging_db, enable_insert_overwrite, enable_external_table_drop class FileReader: def __init__(self, logger, s3_resource): self.logger",
"# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in",
"self.s3_resource.meta.client.download_file(s3_bucket, prefix, destination_location) except RetriesExceededError as e: self.logger.fatal(\"Unable to download the file {0}\".format(e))",
"read the region from the config \") sys.exit(os.EX_CONFIG) if 'athena' in config: if",
"'True' if 'aws' in config and 'region' in config['aws']: aws_region = config['aws']['region'] else:",
"configparser import ConfigParser from s3transfer import RetriesExceededError from s3transfer.exceptions import TransferNotDoneError import os",
"use this file except in compliance # with the License. You may obtain",
"the # specific language governing permissions and limitations # under the License. from",
"staging_db = config['athena']['STAGING_DB'] else: self.logger.error(\"Not able to read the STAGING_DB from the config",
"Software Foundation (ASF) under one # or more contributor license agreements. See the",
"os import sys S3 = \"s3://\" S3A = \"s3a://\" class ConfigReader: def __init__(self,",
"= config['aws']['region'] else: self.logger.error(\"Not able to read the region from the config \")",
"= s3_resource def split_s3_path(self, s3_location): path_parts = s3_location.replace(S3, \"\").replace(S3A, \"\").split(\"/\") s3_bucket = path_parts.pop(0)",
"the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR",
"from the config \") sys.exit(os.EX_CONFIG) if 'athena' in config: if 'ATHENA_OUTPUT_LOCATION' in config['athena']:",
"the {0} from {1} to {2}\".format(file_type, source_location, destination_location)) s3_bucket, prefix = self.split_s3_path(source_location) return",
"or source_location.startswith(S3A): self.logger.info(\"Downloading the {0} from {1} to {2}\".format(file_type, source_location, destination_location)) s3_bucket, prefix",
"else: self.logger.error(\"Not able to read the region from the config \") sys.exit(os.EX_CONFIG) if",
"License. from configparser import ConfigParser from s3transfer import RetriesExceededError from s3transfer.exceptions import TransferNotDoneError",
"as e: self.logger.fatal(\"Unable to download the file {0}\".format(e)) sys.exit(os.EX_OSERR) return destination_location def get_file(self,file_type,",
"See the NOTICE file # distributed with this work for additional information #",
": {0}/{1}\".format(s3_bucket, prefix)) sys.exit(os.EX_DATAERR) except TransferNotDoneError as e: self.logger.fatal(\"Unable to download the file",
"sys.exit(os.EX_DATAERR) except TransferNotDoneError as e: self.logger.fatal(\"Unable to download the file {0}\".format(e)) sys.exit(os.EX_OSERR) return",
"the NOTICE file # distributed with this work for additional information # regarding",
"the region from the config \") sys.exit(os.EX_CONFIG) if 'athena' in config: if 'ATHENA_OUTPUT_LOCATION'",
"in writing, # software distributed under the License is distributed on an #",
"the Apache Software Foundation (ASF) under one # or more contributor license agreements.",
"path_parts.pop(0) prefix = \"/\".join(path_parts) return s3_bucket, prefix def download_input_from_s3(self, s3_bucket, prefix, destination_location): try:",
"to read the athena config\") sys.exit(os.EX_CONFIG) return aws_region, athena_output_location, staging_db, enable_insert_overwrite, enable_external_table_drop class",
"'region' in config['aws']: aws_region = config['aws']['region'] else: self.logger.error(\"Not able to read the region",
"ATHENA_OUTPUT_LOCATION from the config \") sys.exit(os.EX_CONFIG) if 'STAGING_DB' in config['athena']: staging_db = config['athena']['STAGING_DB']",
"enable_insert_overwrite = 'True' enable_external_table_drop = 'True' if 'aws' in config and 'region' in",
"def split_s3_path(self, s3_location): path_parts = s3_location.replace(S3, \"\").replace(S3A, \"\").split(\"/\") s3_bucket = path_parts.pop(0) prefix =",
"file from s3 to local : {0}/{1}\".format(s3_bucket, prefix)) sys.exit(os.EX_DATAERR) except TransferNotDoneError as e:",
"self.logger.error(\"Not able to read the STAGING_DB from the config \") sys.exit(os.EX_CONFIG) if 'ENABLE_INSERT_OVERWRITE'",
"# with the License. You may obtain a copy of the License at",
"def get_file(self,file_type, source_location, destination_location): if source_location.startswith(S3) or source_location.startswith(S3A): self.logger.info(\"Downloading the {0} from {1}",
"prefix def download_input_from_s3(self, s3_bucket, prefix, destination_location): try: self.s3_resource.meta.client.download_file(s3_bucket, prefix, destination_location) except RetriesExceededError as",
"Apache License, Version 2.0 (the # \"License\"); you may not use this file",
"read_config(self): config = ConfigParser() config.read(self.config_location) enable_insert_overwrite = 'True' enable_external_table_drop = 'True' if 'aws'",
"under one # or more contributor license agreements. See the NOTICE file #",
"# to you under the Apache License, Version 2.0 (the # \"License\"); you",
"required by applicable law or agreed to in writing, # software distributed under",
"config = ConfigParser() config.read(self.config_location) enable_insert_overwrite = 'True' enable_external_table_drop = 'True' if 'aws' in",
"by applicable law or agreed to in writing, # software distributed under the",
"for additional information # regarding copyright ownership. The ASF licenses this file #",
"the License for the # specific language governing permissions and limitations # under",
"= config['athena']['ATHENA_OUTPUT_LOCATION'] else: self.logger.error(\"Not able to read the ATHENA_OUTPUT_LOCATION from the config \")",
"applicable law or agreed to in writing, # software distributed under the License",
"= config['athena']['ENABLE_INSERT_OVERWRITE'] else: self.logger.error(\"Not able to read the athena config\") sys.exit(os.EX_CONFIG) return aws_region,",
"'ATHENA_OUTPUT_LOCATION' in config['athena']: athena_output_location = config['athena']['ATHENA_OUTPUT_LOCATION'] else: self.logger.error(\"Not able to read the ATHENA_OUTPUT_LOCATION",
"# specific language governing permissions and limitations # under the License. from configparser",
"config['athena']: enable_external_table_drop = config['athena']['ENABLE_INSERT_OVERWRITE'] else: self.logger.error(\"Not able to read the athena config\") sys.exit(os.EX_CONFIG)",
"config['aws']: aws_region = config['aws']['region'] else: self.logger.error(\"Not able to read the region from the",
"in config['athena']: athena_output_location = config['athena']['ATHENA_OUTPUT_LOCATION'] else: self.logger.error(\"Not able to read the ATHENA_OUTPUT_LOCATION from"
] |
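
# A minimal usage sketch for the two classes above, assuming a boto3 session
# and an INI file at a made-up path; only ConfigReader and FileReader come
# from this module, everything else here is illustrative.
if __name__ == '__main__':
    import logging

    import boto3

    logging.basicConfig(level=logging.INFO)
    demo_log = logging.getLogger('athena-config-demo')  # hypothetical name

    reader = ConfigReader(demo_log, '/tmp/athena.ini')  # hypothetical path
    (aws_region, athena_output_location, staging_db,
     enable_insert_overwrite, enable_external_table_drop) = reader.read_config()

    files = FileReader(demo_log, boto3.resource('s3'))
    # split_s3_path('s3://my-bucket/input/data.csv') -> ('my-bucket', 'input/data.csv')
    files.get_file('input file', 's3://my-bucket/input/data.csv', '/tmp/data.csv')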

import frappe
import datetime


@frappe.whitelist()
def get_batch_nos(doctype, txt, searchfield, start, page_len, filters):
    return frappe.db.sql(
        """select batch_id, expiry_date from `tabBatch`
        where item = {item_code}
        and disabled = 0
        and (expiry_date is null or expiry_date > '{cur_date}')"""
        .format(item_code=frappe.db.escape(filters.get("item")),
                cur_date=datetime.datetime.today()))
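
# A minimal sketch of exercising the whitelisted query above. It has to run
# inside an initialised Frappe site (e.g. a `bench console`); the item code
# 'ITEM-0001' is a made-up example value.
batches = get_batch_nos(
    doctype='Batch', txt='', searchfield='name',
    start=0, page_len=20, filters={'item': 'ITEM-0001'},
)
# Each row is a (batch_id, expiry_date) tuple; only enabled batches that have
# no expiry date or expire after today are returned.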

#!/usr/bin/python
# ex:set fileencoding=utf-8:

from __future__ import unicode_literals

from django import forms
from django.utils.translation import ugettext_lazy as _

from djangobmf.dashboards import Accounting
from djangobmf.sites import Module
from djangobmf.sites import ViewMixin
from djangobmf.sites import register
from djangobmf.sites import site

from .categories import TransactionCategory
from .models import ACCOUNTING_INCOME
from .models import ACCOUNTING_EXPENSE
from .models import ACCOUNTING_ASSET
from .models import ACCOUNTING_LIABILITY
from .models import Account
from .models import Transaction
from .models import TransactionItem
from .views import TransactionCreateView
from .views import TransactionUpdateView


@register(dashboard=Accounting)
class AccountModule(Module):
    model = Account
    default = True


@register(dashboard=Accounting)
class TransactionModule(Module):
    model = Transaction
    default = True
    create = TransactionCreateView
    update = TransactionUpdateView


@register(dashboard=Accounting)
class TransactionItemModule(Module):
    model = TransactionItem
    default = True


site.register_settings('bmfcontrib_accounting', {
    'income': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_INCOME)),
    'expense': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_EXPENSE)),
    'customer': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_ASSET)),
    'supplier': forms.ModelChoiceField(queryset=Account.objects.filter(type=ACCOUNTING_LIABILITY)),
})


@register(category=TransactionCategory)
class AllAccounts(ViewMixin):
    model = Account
    name = _("All Accounts")
    slug = "accounts"


@register(category=TransactionCategory)
class OpenTransactions(ViewMixin):
    model = Transaction
    name = _("Open transactions")
    slug = "open"

    def filter_queryset(self, request, queryset, view):
        return queryset.filter(draft=True).order_by('-modified')


@register(category=TransactionCategory)
class ClosedTransactions(ViewMixin):
    model = Transaction
    name = _("Closed transactions")
    slug = "closed"
    date_resolution = "month"

    def filter_queryset(self, request, queryset, view):
        return queryset.filter(draft=False).order_by('modified')


@register(category=TransactionCategory)
class Archive(ViewMixin):
    model = TransactionItem
    name = _("Transaction archive")
    slug = "archive"
    date_resolution = "week"
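
# A hypothetical extra view, sketched only to show the registration pattern
# used above: @register attaches a ViewMixin to the category, and
# filter_queryset narrows what the dashboard lists. "RecentTransactions" and
# its slug are made-up names, not part of the module.
@register(category=TransactionCategory)
class RecentTransactions(ViewMixin):
    model = Transaction
    name = _("Recent transactions")
    slug = "recent"

    def filter_queryset(self, request, queryset, view):
        # Most recently modified first, regardless of draft state.
        return queryset.order_by('-modified')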
[
"%(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a log and elevate it to",
"to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a",
"the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a log",
"level options supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error',",
"= logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the callable that can",
"loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create",
"Define the callable that can be used to create properly configured loggers. logger",
"import logging from .config import config # The level options supported in configuration.",
"Configure 3rd party loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' )",
"that can be used to create properly configured loggers. logger = _setup_logger_supply() #",
"log return create_log # Define the callable that can be used to create",
"configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the",
"return log return create_log # Define the callable that can be used to",
"# The level options supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info',",
"a logger generator.''' configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, #",
"%(levelname)-8s %(message)s' ) def create_log(name): '''Create a log and elevate it to the",
"be used to create properly configured loggers. logger = _setup_logger_supply() # pylint: disable=invalid-name",
"callable that can be used to create properly configured loggers. logger = _setup_logger_supply()",
"log and elevate it to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return",
"level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the callable",
"'''Create a log and elevate it to the configured level.''' log = logging.getLogger(name)",
"party loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name):",
"logger factory.''' import logging from .config import config # The level options supported",
"config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd party loggers to",
"configuration. logging.basicConfig( level=20, # Configure 3rd party loggers to the INFO level. format='%(asctime)-10s",
"factory.''' import logging from .config import config # The level options supported in",
"the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define",
"elevate it to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return",
"'critical' )) def _setup_logger_supply(): '''Create and return a logger generator.''' configured_level = config.development.log_level",
"and elevate it to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log",
"= config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd party loggers",
".config import config # The level options supported in configuration. LEVEL_OPTIONS = list((",
"3rd party loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def",
"log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the callable that can be used",
"basic configuration. logging.basicConfig( level=20, # Configure 3rd party loggers to the INFO level.",
"create_log # Define the callable that can be used to create properly configured",
"configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply():",
"log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the callable that",
"create_log(name): '''Create a log and elevate it to the configured level.''' log =",
"generator.''' configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd",
"level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a log and elevate",
")) def _setup_logger_supply(): '''Create and return a logger generator.''' configured_level = config.development.log_level #",
"level=20, # Configure 3rd party loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s",
"INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a log and",
"LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create",
"'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and return a",
"format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s' ) def create_log(name): '''Create a log and elevate it",
"_setup_logger_supply(): '''Create and return a logger generator.''' configured_level = config.development.log_level # Perform basic",
"list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and return",
"utf-8 '''Centralized logger factory.''' import logging from .config import config # The level",
"'''Centralized logger factory.''' import logging from .config import config # The level options",
"logging.basicConfig( level=20, # Configure 3rd party loggers to the INFO level. format='%(asctime)-10s %(name)-30s",
"# Configure 3rd party loggers to the INFO level. format='%(asctime)-10s %(name)-30s %(levelname)-8s %(message)s'",
"'error', 'critical' )) def _setup_logger_supply(): '''Create and return a logger generator.''' configured_level =",
"'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and return a logger",
"it to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log",
"logger generator.''' configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, # Configure",
"config # The level options supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug',",
"to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log #",
"The level options supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning',",
"a log and elevate it to the configured level.''' log = logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10)",
"in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def",
"return create_log # Define the callable that can be used to create properly",
"'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and return a logger generator.'''",
"configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd party",
"%(message)s' ) def create_log(name): '''Create a log and elevate it to the configured",
"and return a logger generator.''' configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig(",
"def create_log(name): '''Create a log and elevate it to the configured level.''' log",
"options supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical'",
"from .config import config # The level options supported in configuration. LEVEL_OPTIONS =",
"return a logger generator.''' configured_level = config.development.log_level # Perform basic configuration. logging.basicConfig( level=20,",
"'''Create and return a logger generator.''' configured_level = config.development.log_level # Perform basic configuration.",
"supported in configuration. LEVEL_OPTIONS = list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' ))",
") def create_log(name): '''Create a log and elevate it to the configured level.'''",
"# Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd party loggers to the",
"Perform basic configuration. logging.basicConfig( level=20, # Configure 3rd party loggers to the INFO",
"coding: utf-8 '''Centralized logger factory.''' import logging from .config import config # The",
"the callable that can be used to create properly configured loggers. logger =",
"# coding: utf-8 '''Centralized logger factory.''' import logging from .config import config #",
"import config # The level options supported in configuration. LEVEL_OPTIONS = list(( 'notset',",
"logging from .config import config # The level options supported in configuration. LEVEL_OPTIONS",
"logging.getLogger(name) log.setLevel(LEVEL_OPTIONS.index(configured_level)*10) return log return create_log # Define the callable that can be",
"def _setup_logger_supply(): '''Create and return a logger generator.''' configured_level = config.development.log_level # Perform",
"can be used to create properly configured loggers. logger = _setup_logger_supply() # pylint:",
"'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and return a logger generator.''' configured_level",
"= list(( 'notset', 'debug', 'info', 'warning', 'error', 'critical' )) def _setup_logger_supply(): '''Create and",
"# Define the callable that can be used to create properly configured loggers."
] |
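Because LEVEL_OPTIONS is ordered to match the stdlib's numeric levels ('notset' is 0, 'debug' 10, up to 'critical' 50), `LEVEL_OPTIONS.index(configured_level) * 10` maps the configured name straight onto logging's constants. A minimal usage sketch, assuming the factory lives in a module named `logger.py` and that `log_level` is configured as 'debug' (both assumptions, not stated in the source):

# Hypothetical caller; the module path and configured level are assumptions.
from .logger import logger

log = logger('app.db')            # calls create_log('app.db') under the hood
log.debug('connection opened')    # emitted because 'debug' maps to level 10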
import sys
import numpy as np
import quaternionic
import pytest


def test_self_return():
    # f1 receives quaternionic arrays but returns a plain ndarray.
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    d1 = f1(a, b, c)
    assert isinstance(d1, np.ndarray) and not isinstance(d1, quaternionic.array)
    # Wrapping with type_self_return re-views the result as the input type.
    f2 = quaternionic.utilities.type_self_return(f1)
    d2 = f2(a, b, c)
    assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
    f1.nin = 3
    f3 = quaternionic.utilities.type_self_return(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)


def test_ndarray_args():
    # f1 expects plain ndarrays only; ndarray_args must strip the subclass.
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    f2 = quaternionic.utilities.ndarray_args(f1)
    d2 = f2(a, b, c)
    assert isinstance(d2, np.ndarray) and not isinstance(d2, quaternionic.array)
    f1.nin = 3
    f3 = quaternionic.utilities.ndarray_args(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and not isinstance(d3, quaternionic.array)


def test_ndarray_args_and_return():
    # Arguments are converted to ndarrays and the ndarray result is
    # converted back to the quaternionic type.
    def f1(a, b, c):
        d = np.asarray(a).copy()
        assert isinstance(a, np.ndarray) and not isinstance(a, quaternionic.array)
        assert isinstance(b, np.ndarray) and not isinstance(b, quaternionic.array)
        assert isinstance(c, np.ndarray) and not isinstance(c, quaternionic.array)
        assert isinstance(d, np.ndarray) and not isinstance(d, quaternionic.array)
        return d
    a = quaternionic.array.random((17, 3, 4))
    b = quaternionic.array.random((13, 3, 4))
    c = quaternionic.array.random((11, 3, 4))
    f2 = quaternionic.utilities.ndarray_args_and_return(f1)
    d2 = f2(a, b, c)
    assert isinstance(d2, np.ndarray) and isinstance(d2, quaternionic.array)
    f1.nin = 3
    f3 = quaternionic.utilities.ndarray_args_and_return(f1)
    d3 = f3(a, b, c)
    assert isinstance(d3, np.ndarray) and isinstance(d3, quaternionic.array)


@pytest.mark.skipif(sys.implementation.name.lower() == 'pypy', reason="No numba on pypy")
def test_types_to_ftylist():
    import numba
    types_to_ftylist = quaternionic.utilities.convert_numpy_ufunc_type_to_numba_ftylist
    types = '?bhilqpBHILQPfdgF->D'
    ftylist = numba.complex128(
        numba.boolean, numba.byte, numba.short, numba.intc, numba.int_,
        numba.longlong, numba.intp, numba.char, numba.ushort, numba.uintc,
        numba.uint, numba.ulonglong, numba.uintp, numba.float32, numba.float_,
        numba.double, numba.complex64,
    )
    assert types_to_ftylist([types]) == [ftylist]


def test_pyguvectorize():
    _quaternion_resolution = 10 * np.finfo(float).resolution
    np.random.seed(1234)
    one = quaternionic.array(1, 0, 0, 0)
    x = quaternionic.array.random((7, 13, 4))
    y = quaternionic.array.random((13, 4))
    z = np.random.rand(13)
    arg0s = [one, -(1+2*_quaternion_resolution)*one, -one, x]
    for k in dir(quaternionic.algebra_ufuncs):
        if not k.startswith('__'):
            f1 = getattr(quaternionic.algebra_ufuncs, k)
            f2 = getattr(quaternionic.algebra, k)
            sig = f2.signature
            inputs = sig.split('->')[0].split(',')
            for arg0 in arg0s:
                args = [arg0.ndarray] if inputs[0] == '(n)' else [z,]
                if len(inputs) > 1:
                    args.append(y.ndarray if inputs[1] == '(n)' else z)
                # The pyguvectorize'd pure-Python version must agree with
                # the registered ufunc to within quaternion resolution.
                assert np.allclose(
                    f1(*args),
                    quaternionic.utilities.pyguvectorize(f2.types, f2.signature)(f2)(*args),
                    atol=0.0,
                    rtol=_quaternion_resolution
                )
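The three wrappers exercised above share one job: translate between plain ndarrays and the quaternionic.array subclass at a function boundary. A minimal sketch of that behavior, written only from what the tests assert (not quaternionic's actual implementation):

import numpy as np

def type_self_return_sketch(f):
    # Sketch: re-view f's ndarray result as the type of the first
    # argument, which is what test_self_return checks on d2 and d3.
    def wrapper(*args, **kwargs):
        return np.asarray(f(*args, **kwargs)).view(type(args[0]))
    return wrapper

def ndarray_args_sketch(f):
    # Sketch: strip subclass information from every argument before
    # calling f, matching the "not isinstance(..., quaternionic.array)"
    # assertions inside test_ndarray_args.
    def wrapper(*args, **kwargs):
        return f(*(np.asarray(a) for a in args), **kwargs)
    return wrapper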
# -*- coding: utf-8 -*-
from __future__ import unicode_literals


def f1(x):
    return x + 3


a = 1
b = f1(a)
print(b)

A = [1, 2, 3]
'''
B = []
for x in A:
    B.append(f1(x))
'''
B = list(map(f1, A))
print(B)
C = list(map(lambda x: x + 3, A))
print(C)
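The commented-out loop, `map` with a named function, and `map` with a lambda are three spellings of the same transformation; a list comprehension is a fourth, and is usually the idiomatic choice:

A = [1, 2, 3]
C = [x + 3 for x in A]   # equivalent to list(map(lambda x: x + 3, A))
print(C)                 # [4, 5, 6]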
class Solution(object):
    def twoSum(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: List[int]
        """
        # Map each value to its index; one pass, O(n) time and space.
        d = {}
        for i in range(len(nums)):
            find = target - nums[i]
            if d.get(find, None) is None:
                d[nums[i]] = i
            else:
                # The complement was seen earlier; return both indices.
                return [d[find], i]


class Solution1:
    # @return a tuple, (index1, index2)
    def twoSum(self, num, target):
        # Same idea, using `in` instead of .get() for the lookup.
        tmp = {}
        for i in range(len(num)):
            if target - num[i] in tmp:
                return [tmp[target - num[i]], i]
            else:
                tmp[num[i]] = i


nums = [2, 5, 2, 11, 15]
target = 4
a = Solution()
print(a.twoSum(nums, target))
b = Solution1()
print(b.twoSum(nums, target))
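Run as-is, both classes print [0, 2]: the 2s at indices 0 and 2 are the only pair summing to the target 4. The dictionary replaces a quadratic scan over all pairs with a single pass; for contrast, the brute-force baseline looks like this (a sketch, not part of the original file):

def two_sum_brute(nums, target):
    # O(n^2) baseline: try every pair of indices.
    for i in range(len(nums)):
        for j in range(i + 1, len(nums)):
            if nums[i] + nums[j] == target:
                return [i, j]

print(two_sum_brute([2, 5, 2, 11, 15], 4))  # [0, 2]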
# Advent of Code, Day 2

# State variables for problems 1 & 2
horiz1 = depth1 = 0
horiz2 = depth2 = aim2 = 0

# Process each line of text
for l in open('input.txt'):
    # Get instruction and value
    instruction, n = l.split()
    n = int(n)
    # Process instructions for both problems
    if instruction == 'forward':
        horiz1 += n
        horiz2 += n
        depth2 += aim2 * n
    elif instruction == 'down':
        depth1 += n
        aim2 += n
    elif instruction == 'up':
        depth1 -= n
        aim2 -= n
    else:
        print('Bad instruction:', instruction)

# Show results for both problems
print(f'Problem 1: Ending pos = {horiz1}, depth = {depth1}, product = {horiz1 * depth1}')
print(f'Problem 2: Ending pos = {horiz2}, depth = {depth2}, aim = {aim2}, product = {horiz2 * depth2}')
"for l in open('input.txt'): # Get instruction and value instruction, n = l.split()",
"+= n aim2 += n elif instruction == 'up': depth1 -= n aim2",
"value instruction, n = l.split() n = int(n) # Process instructions for both",
"instruction:', instruction) # Show results for both problems print(f'Problem 1: Ending pos =",
"open('input.txt'): # Get instruction and value instruction, n = l.split() n = int(n)",
"int(n) # Process instructions for both problems if instruction == 'forward': horiz1 +=",
"and value instruction, n = l.split() n = int(n) # Process instructions for",
"aim2 -= n else: print('Bad instruction:', instruction) # Show results for both problems",
"instruction == 'down': depth1 += n aim2 += n elif instruction == 'up':",
"n aim2 -= n else: print('Bad instruction:', instruction) # Show results for both",
"2: Ending pos = {horiz2}, depth = {depth2}, aim = {aim2}, product =",
"Advent of Code, Day 2 # State variables for problems 1 & 2",
"problems if instruction == 'forward': horiz1 += n horiz2 += n depth2 +=",
"problems 1 & 2 horiz1 = depth1 = 0 horiz2 = depth2 =",
"depth1 = 0 horiz2 = depth2 = aim2 = 0 # Process each",
"== 'forward': horiz1 += n horiz2 += n depth2 += aim2 * n",
"+= n elif instruction == 'up': depth1 -= n aim2 -= n else:",
"Get instruction and value instruction, n = l.split() n = int(n) # Process",
"+= n horiz2 += n depth2 += aim2 * n elif instruction ==",
"Process each line of text for l in open('input.txt'): # Get instruction and",
"# Advent of Code, Day 2 # State variables for problems 1 &"
"import configparser import ctypes import io import mmap import msvcrt import multiprocessing as",
"+ game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag for injected dll.\")",
"gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID",
"callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path +",
"model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event = mp.Event() #",
"bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header,",
"in dict: new_name = name[:31] # Make sure name does not exceed 31",
"= framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name'))",
"the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't",
"print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event",
"' + game_name + 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0",
"game_name + 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0",
"print(\"Successfully configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch = True # Wait",
"for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory)",
"1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist",
"if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in ' + save_path)",
"+ str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num",
"quit_event = mp.Event() # Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback",
"gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID",
"for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent,",
"= bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER,",
"[] callbacks = [] bot_parameter_list = [] name_dict = dict() save_data = True",
"print('gameName: ' + game_name + 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config()",
"sys import random import time import bot_input_struct as bi import bot_manager import game_data_struct",
"print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path) joined_path",
"server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager = ServerConverter('', False,",
"server') print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable to connect to server')",
"to server') def get_bot_config_file_list(botCount, config): config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER,",
"server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config): config_file_list = [] for i",
"True # Wait 100 milliseconds then check for an error code time.sleep(0.1) game_data_shared_memory",
"+ str(count + 1) + \")\" # Truncate at 27 because we can",
"= bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID =",
"handle duplicates def get_sanitized_bot_name(dict, name): if name not in dict: new_name = name[:31]",
"in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie'",
"import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX",
"'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 # Set",
"not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in ' + save_path) gameInputPacket.iNumPlayers",
"save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000))) + '-'",
"and store name and team for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path))",
"Wait 100 milliseconds then check for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1,",
"callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting",
"gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i],",
"Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot",
"True, True, True, username='unknown') except ImportError: server_manager = ServerConverter('', False, False, False) print('config.py",
"= 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError:",
"str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully",
"'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id')",
"= 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER =",
"then check for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output",
"then raise an exception quit_event.set() terminated = False while not terminated: terminated =",
"bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT')",
"False) print('config.py not present, cannot upload replays to collective server') print('Check Discord server",
"run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event,",
"'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager",
"os.makedirs(joined_path) print('gameName: ' + game_name + 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants",
"str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID",
"bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header,",
"for injected dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds then check for",
"+ 1) + \")\" # Truncate at 27 because we can have up",
"os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in '",
"dict[name] = 1 else: count = dict[name] new_name = name[:27] + \"(\" +",
"= [] bot_modules = [] processes = [] callbacks = [] bot_parameter_list =",
"framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)):",
"# Cut off at 31 characters and handle duplicates def get_sanitized_bot_name(dict, name): if",
"bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path):",
"Make sure name does not exceed 31 characters dict[name] = 1 else: count",
"'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e:",
"27 because we can have up to '(10)' appended dict[name] = count +",
"server_manager.load_config() num_team_0 = 0 # Set configuration values for bots and store name",
"participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg')",
"config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get model hash,\", e)",
"files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names = [] bot_teams",
"bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader) bm.run()",
"= bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID =",
"Create empty lists bot_names = [] bot_teams = [] bot_modules = [] processes",
"False, False, False) print('config.py not present, cannot upload replays to collective server') print('Check",
"cannot upload replays to collective server') print('Check Discord server for information') if server_manager.error:",
"Terminate all process and then raise an exception quit_event.set() terminated = False while",
"True for callback in callbacks: if not callback.is_set(): terminated = False if __name__",
"GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) #",
"= bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID =",
"= dict[name] new_name = name[:27] + \"(\" + str(count + 1) + \")\"",
"e: print('username not set in config', e) print('using default username') # Retrieve bot",
"= framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill",
"= ServerConverter('', False, False, False) print('config.py not present, cannot upload replays to collective",
"= [] bot_teams = [] bot_modules = [] processes = [] callbacks =",
"== 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 +=",
"gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID",
"game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch",
"get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event = mp.Event()",
"at 31 characters and handle duplicates def get_sanitized_bot_name(dict, name): if name not in",
"bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds",
"= 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX",
"terminate before terminating main process terminated = False while not terminated: terminated =",
"bot_input_struct as bi import bot_manager import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter",
"def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader): bm =",
"save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data,",
"print('username not set in config', e) print('using default username') # Retrieve bot config",
"save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 # Set configuration values for",
"loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if",
"'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id')",
"team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1",
"exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all processes to terminate before",
"= mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' +",
"name, team, index, module_name, game_name, save_data, server_uploader) bm.run() def main(): # Set up",
"new_name = name[:31] # Make sure name does not exceed 31 characters dict[name]",
"save_path + '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag",
"Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_'",
"ctypes import io import mmap import msvcrt import multiprocessing as mp import os",
"= dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name =",
"+ save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 # Set configuration values",
"in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get model",
"BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try:",
"= 0 # Set configuration values for bots and store name and team",
"def main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous",
"bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path)",
"1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1",
"replays to collective server') print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable to",
"count = dict[name] new_name = name[:27] + \"(\" + str(count + 1) +",
"bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER,",
"= os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in",
"'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER =",
"to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all processes to terminate",
"Set configuration values for bots and store name and team for i in",
"= True # Wait 100 milliseconds then check for an error code time.sleep(0.1)",
"Open anonymous shared memory for entire GameInputPacket and map buffer buff = mmap.mmap(-1,",
"to collective server') print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable to connect",
"for entire GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket =",
"empty lists bot_names = [] bot_teams = [] bot_modules = [] processes =",
"try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to load bot parameters') else:",
"not terminated: terminated = True for callback in callbacks: if not callback.is_set(): terminated",
"callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event,",
"= os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000))) + '-' +",
"bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header,",
"bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header,",
"RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire GameInputPacket",
"if not bot_output.iLastError == 0: # Terminate all process and then raise an",
"= bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID =",
"time import bot_input_struct as bi import bot_manager import game_data_struct as gd import rlbot_exception",
"load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled:",
"get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header,",
"return config_file_list # Cut off at 31 characters and handle duplicates def get_sanitized_bot_name(dict,",
"range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in",
"default username') # Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create",
"participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names = [] bot_teams =",
"module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name,",
"= bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID =",
"== 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled =",
"else: count = dict[name] new_name = name[:27] + \"(\" + str(count + 1)",
"exception quit_event.set() terminated = False while not terminated: terminated = True for callback",
"import ctypes import io import mmap import msvcrt import multiprocessing as mp import",
"upload replays to collective server') print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable",
"server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config): config_file_list = [] for",
"number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as",
"'training', 'replays') game_name = str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000)) if",
"except Exception as e: bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header,",
"terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\")",
"'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to load",
"\")\" # Truncate at 27 because we can have up to '(10)' appended",
"bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header",
"in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off at 31",
"new_name def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader): bm",
"'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header =",
"rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for",
"INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants')",
"= True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000)))",
"bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as",
"import bot_input_struct as bi import bot_manager import game_data_struct as gd import rlbot_exception from",
"import sys import random import time import bot_input_struct as bi import bot_manager import",
"team, index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team,",
"print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)))",
"game_name = str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000)) if save_data: print(save_path)",
"'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id')",
"num_team_0) # Create Quit event quit_event = mp.Event() # Launch processes for i",
"name[:31] # Make sure name does not exceed 31 characters dict[name] = 1",
"framework_config) # Create empty lists bot_names = [] bot_teams = [] bot_modules =",
"os import sys import random import time import bot_input_struct as bi import bot_manager",
"random import time import bot_input_struct as bi import bot_manager import game_data_struct as gd",
"[] bot_modules = [] processes = [] callbacks = [] bot_parameter_list = []",
"count + 1 return new_name def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name,",
"bm.run() def main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open",
"processes to terminate before terminating main process terminated = False while not terminated:",
"+ ' does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not",
"config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off at 31 characters and",
"= gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate all process and then",
"= 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER",
"'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER",
"before terminating main process terminated = False while not terminated: terminated = True",
"Exception as e: print('username not set in config', e) print('using default username') #",
"game_name, save_data, server_uploader) bm.run() def main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser()",
"not present, cannot upload replays to collective server') print('Check Discord server for information')",
"if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName),",
"and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot",
"if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\")",
"and then raise an exception quit_event.set() terminated = False while not terminated: terminated",
"# Terminate all process and then raise an exception quit_event.set() terminated = False",
"# Truncate at 27 because we can have up to '(10)' appended dict[name]",
"'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id')",
"dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time()",
"1000))) + '-' + str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path)",
"+ str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based",
"if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the",
"bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the hash in",
"all processes to terminate before terminating main process terminated = False while not",
"terminating main process terminated = False while not terminated: terminated = True for",
"framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not set in",
"config', e) print('using default username') # Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants,",
"bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header,",
"'(10)' appended dict[name] = count + 1 return new_name def run_agent(terminate_event, callback_event, config_file,",
"* 1000))) + '-' + str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path):",
"entire GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff)",
"e: bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX",
"# Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists",
"print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all",
"server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event = mp.Event() # Launch processes for",
"def get_sanitized_bot_name(dict, name): if name not in dict: new_name = name[:31] # Make",
"# Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except",
"i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id')",
"sure name does not exceed 31 characters dict[name] = 1 else: count =",
"= 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant",
"import os import sys import random import time import bot_input_struct as bi import",
"name and team for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config =",
"ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager = ServerConverter('', False, False, False)",
"gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID",
"in callbacks: if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character",
"name[:27] + \"(\" + str(count + 1) + \")\" # Truncate at 27",
"gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate all process and then raise",
"process terminated = False while not terminated: terminated = True for callback in",
"PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput'",
"100 milliseconds then check for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock),",
"exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: '",
"PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_'",
"'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id')",
"io import mmap import msvcrt import multiprocessing as mp import os import sys",
"connect to server') def get_bot_config_file_list(botCount, config): config_file_list = [] for i in range(botCount):",
"'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot",
"str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not",
"RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER =",
"+ str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX",
"try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager = ServerConverter('',",
"main process terminated = False while not terminated: terminated = True for callback",
"os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in ' + save_path) gameInputPacket.iNumPlayers =",
"ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER,",
"i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event,",
"range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i],",
"= 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER",
"bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names =",
"= framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not set",
"'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000))",
"values for bots and store name and team for i in range(num_participants): bot_config_path",
"= bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID =",
"and handle duplicates def get_sanitized_bot_name(dict, name): if name not in dict: new_name =",
"1 return new_name def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data,",
"gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try:",
"callbacks = [] bot_parameter_list = [] name_dict = dict() save_data = True save_path",
"exceed 31 characters dict[name] = 1 else: count = dict[name] new_name = name[:27]",
"get_sanitized_bot_name(dict, name): if name not in dict: new_name = name[:31] # Make sure",
"= 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER =",
"USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True,",
"duplicates def get_sanitized_bot_name(dict, name): if name not in dict: new_name = name[:31] #",
"[] name_dict = dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays')",
"gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID",
"' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 # Set configuration",
"= configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else:",
"bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header,",
"+ '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag for",
"else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num",
"def get_bot_config_file_list(botCount, config): config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX +",
"Setting flag for injected dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds then",
"gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None)",
"Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception",
"for all processes to terminate before terminating main process terminated = False while",
"the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get model hash,\",",
"bots and store name and team for i in range(num_participants): bot_config_path = participant_configs[i]",
"bot_output.iLastError == 0: # Terminate all process and then raise an exception quit_event.set()",
"import random import time import bot_input_struct as bi import bot_manager import game_data_struct as",
"to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if",
"= participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']),",
"as mp import os import sys import random import time import bot_input_struct as",
"'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id')",
"multiprocessing as mp import os import sys import random import time import bot_input_struct",
"e) print('using default username') # Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config)",
"except Exception as e: print('username not set in config', e) print('using default username')",
"= bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed",
"team, index, module_name, game_name, save_data, server_uploader) bm.run() def main(): # Set up RLBot.cfg",
"gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the hash",
"server_manager = ServerConverter('', False, False, False) print('config.py not present, cannot upload replays to",
"not set in config', e) print('using default username') # Retrieve bot config files",
"(\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event =",
"of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e:",
"we can have up to '(10)' appended dict[name] = count + 1 return",
"'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True,",
"for callback in callbacks: if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press",
"mmap import msvcrt import multiprocessing as mp import os import sys import random",
"present, cannot upload replays to collective server') print('Check Discord server for information') if",
"+ str(i))) return config_file_list # Cut off at 31 characters and handle duplicates",
"bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header,",
"callbacks: if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to",
"os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name)",
"BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info'",
"# Wait 100 milliseconds then check for an error code time.sleep(0.1) game_data_shared_memory =",
"bot_parameter_list = [] name_dict = dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code',",
"gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header,",
"config_file_list # Cut off at 31 characters and handle duplicates def get_sanitized_bot_name(dict, name):",
"as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX",
"num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not",
"terminated = True for callback in callbacks: if not callback.is_set(): terminated = False",
"'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER =",
"gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID =",
"for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off",
"game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name,",
"quit_event.set() terminated = False while not terminated: terminated = True for callback in",
"gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID",
"save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch =",
"= 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX =",
"= num_participants server_manager.load_config() num_team_0 = 0 # Set configuration values for bots and",
"Quit event quit_event = mp.Event() # Launch processes for i in range(num_participants): if",
"server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader)",
"gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled",
"= 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE =",
"= configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire GameInputPacket and map",
"name, team, index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name,",
"framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam",
"= 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager",
"BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam ==",
"memory for entire GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket",
"= count + 1 return new_name def run_agent(terminate_event, callback_event, config_file, name, team, index,",
"bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader) bm.run() def main():",
"= framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and",
"try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not set in config', e)",
"False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() #",
"Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User",
"based on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e:",
"dict: new_name = name[:31] # Make sure name does not exceed 31 characters",
"if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating')",
"Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager =",
"bot_names = [] bot_teams = [] bot_modules = [] processes = [] callbacks",
"PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName",
"Truncate at 27 because we can have up to '(10)' appended dict[name] =",
"= 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True,",
"[] bot_teams = [] bot_modules = [] processes = [] callbacks = []",
"bot_modules = [] processes = [] callbacks = [] bot_parameter_list = [] name_dict",
"time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError ==",
"gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID =",
"module_name, game_name, save_data, server_uploader) bm.run() def main(): # Set up RLBot.cfg framework_config =",
"'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX",
"on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print",
"num_team_0 = 0 # Set configuration values for bots and store name and",
"off at 31 characters and handle duplicates def get_sanitized_bot_name(dict, name): if name not",
"True, True, username='unknown') except ImportError: server_manager = ServerConverter('', False, False, False) print('config.py not",
"' does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path):",
"config_file, name, team, index, module_name, game_name, save_data, server_uploader) bm.run() def main(): # Set",
"character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all processes to",
"mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants =",
"in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX +",
"import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant",
"Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names",
"i, bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots.",
"hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get",
"0 # Set configuration values for bots and store name and team for",
"error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not",
"server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0)",
"while not terminated: terminated = True for callback in callbacks: if not callback.is_set():",
"msvcrt import multiprocessing as mp import os import sys import random import time",
"shared memory for entire GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG)",
"str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX +",
"= bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID =",
"store name and team for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config",
"0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean(",
"mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate",
"gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i))",
"= ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except ImportError: server_manager = ServerConverter('', False, False,",
"quit_event.set() # Wait for all processes to terminate before terminating main process terminated",
"dict[name] new_name = name[:27] + \"(\" + str(count + 1) + \")\" #",
"'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id')",
"to connect to server') def get_bot_config_file_list(botCount, config): config_file_list = [] for i in",
"INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER",
"# Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory",
"Down\") quit_event.set() # Wait for all processes to terminate before terminating main process",
"as e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit",
"gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID",
"BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX +",
"+ \")\" # Truncate at 27 because we can have up to '(10)'",
"'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg'",
"= [] name_dict = dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training',",
"have up to '(10)' appended dict[name] = count + 1 return new_name def",
"gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 # Set configuration values for bots",
"in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback,",
"process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\'",
"and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine",
"bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None)",
"= BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam",
"mp.Event() # Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event()",
"not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName:",
"game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0:",
"= 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER",
"print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable to connect to server') def",
"= team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header,",
"gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID",
"an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if",
"= 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout'",
"try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants,",
"buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants",
"str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000)) if save_data: print(save_path) if not",
"from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX =",
"bot_manager import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER =",
"False, False) print('config.py not present, cannot upload replays to collective server') print('Check Discord",
"[] bot_parameter_list = [] name_dict = dict() save_data = True save_path = os.path.join(os.getcwd(),",
"if name not in dict: new_name = name[:31] # Make sure name does",
"processes = [] callbacks = [] bot_parameter_list = [] name_dict = dict() save_data",
"bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if",
"terminated = False while not terminated: terminated = True for callback in callbacks:",
"can have up to '(10)' appended dict[name] = count + 1 return new_name",
"loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER,",
"= i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header,",
"set in config', e) print('using default username') # Retrieve bot config files participant_configs",
"config): config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return",
"PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX",
"save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() *",
"Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000',",
"terminated: terminated = True for callback in callbacks: if not callback.is_set(): terminated =",
"import time import bot_input_struct as bi import bot_manager import game_data_struct as gd import",
"i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off at",
"for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config:",
"<gh_stars>0 import configparser import ctypes import io import mmap import msvcrt import multiprocessing",
"# Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback)",
"'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER])",
"PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off at 31 characters and handle",
"'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id')",
"all process and then raise an exception quit_event.set() terminated = False while not",
"ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX =",
"except ImportError: server_manager = ServerConverter('', False, False, False) print('config.py not present, cannot upload",
"hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event = mp.Event() # Launch",
"bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header,",
"get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names = [] bot_teams = [] bot_modules",
"os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name",
"bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to",
"PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER,",
"main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared",
"PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model",
"dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds then check for an error",
"+ \"(\" + str(count + 1) + \")\" # Truncate at 27 because",
"does not exceed 31 characters dict[name] = 1 else: count = dict[name] new_name",
"bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager)) process.start()",
"team for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if",
"+ 1 return new_name def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name,",
"if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0:",
"PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam =",
"else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else:",
"if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num",
"num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER,",
"not in dict: new_name = name[:31] # Make sure name does not exceed",
"callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader) bm.run() def main(): #",
"gameInputPacket.bStartMatch = True # Wait 100 milliseconds then check for an error code",
"Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for",
"server for information') if server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config):",
"= framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex",
"bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header,",
"PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration'",
"False while not terminated: terminated = True for callback in callbacks: if not",
"callback = mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i,",
"= mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: #",
"= mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i],",
"characters and handle duplicates def get_sanitized_bot_name(dict, name): if name not in dict: new_name",
"str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER",
"callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager))",
"= bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID =",
"configuration values for bots and store name and team for i in range(num_participants):",
"msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all processes to terminate before terminating",
"configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path)",
"= False while not terminated: terminated = True for callback in callbacks: if",
"= 1 else: count = dict[name] new_name = name[:27] + \"(\" + str(count",
"as bi import bot_manager import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import",
"bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the hash in the config try:",
"any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait for all processes",
"= [] callbacks = [] bot_parameter_list = [] name_dict = dict() save_data =",
"+ str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header =",
"framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill =",
"Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process",
"collective server') print('Check Discord server for information') if server_manager.error: server_manager.warn_server('unable to connect to",
"gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i))",
"Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown')",
"the model based on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception",
"mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name,",
"= [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list #",
"game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration'",
"injected dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds then check for an",
"up to '(10)' appended dict[name] = count + 1 return new_name def run_agent(terminate_event,",
"return new_name def run_agent(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader):",
"'-' + str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + '",
"= get_sanitized_bot_name(name_dict, bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID =",
"mp import os import sys import random import time import bot_input_struct as bi",
"1) + \")\" # Truncate at 27 because we can have up to",
"in callbacks: if not callback.is_set(): terminated = False if __name__ == '__main__': main()",
"str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on",
"# downloads the model based on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash'])",
"and team for i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser()",
"information') if server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config): config_file_list =",
"'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads",
"= mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants",
"os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))",
"config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list",
"'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id')",
"bot_teams = [] bot_modules = [] processes = [] callbacks = [] bot_parameter_list",
"import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_'",
"except Exception as e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) #",
"model based on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except Exception as",
"anonymous shared memory for entire GameInputPacket and map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket),",
"str(i))) return config_file_list # Cut off at 31 characters and handle duplicates def",
"milliseconds then check for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG)",
"gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID",
"[] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut",
"'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not set in config',",
"save_data, server_uploader) bm.run() def main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE)",
"if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path) joined_path =",
"framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire GameInputPacket and",
"+ 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 = 0 #",
"to '(10)' appended dict[name] = count + 1 return new_name def run_agent(terminate_event, callback_event,",
"processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback = mp.Event() callbacks.append(callback) process =",
"bot_config.get(loadout_header, 'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id')",
"framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex =",
"= BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX",
"flag for injected dll.\") gameInputPacket.bStartMatch = True # Wait 100 milliseconds then check",
"= name[:31] # Make sure name does not exceed 31 characters dict[name] =",
"not bot_output.iLastError == 0: # Terminate all process and then raise an exception",
"not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch()",
"name): if name not in dict: new_name = name[:31] # Make sure name",
"= bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception",
"ServerConverter('', False, False, False) print('config.py not present, cannot upload replays to collective server')",
"bi.GameInputPacket.from_buffer(buff) # Determine number of participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username'))",
"'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER =",
"team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id')",
"Exception as e: bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name'))",
"True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000))) +",
"game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name + 'in ' +",
"BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location'",
"Wait for all processes to terminate before terminating main process terminated = False",
"if server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config): config_file_list = []",
"ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate all",
"callback in callbacks: if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any",
"raise an exception quit_event.set() terminated = False while not terminated: terminated = True",
"'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange' BOT_CONFIG_MODULE_HEADER = 'Bot",
"= 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG",
"mp.Event() callbacks.append(callback) process = mp.Process(target=run_agent, args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path",
"'username')) except Exception as e: print('username not set in config', e) print('using default",
"server_manager)) process.start() print(\"Successfully configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch = True",
"bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try:",
"'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout",
"dict[name] = count + 1 return new_name def run_agent(terminate_event, callback_event, config_file, name, team,",
"participants num_participants = framework_config.getint(RLBOT_CONFIGURATION_HEADER, 'num_participants') try: server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username",
"1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX +",
"map buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number",
"= [] processes = [] callbacks = [] bot_parameter_list = [] name_dict =",
"server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num =",
"Create Quit event quit_event = mp.Event() # Launch processes for i in range(num_participants):",
"num_participants server_manager.load_config() num_team_0 = 0 # Set configuration values for bots and store",
"as e: print('username not set in config', e) print('using default username') # Retrieve",
"config_file, name, team, index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file,",
"= bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index, module_name, game_name, save_data, server_uploader) bm.run() def",
"31 characters dict[name] = 1 else: count = dict[name] new_name = name[:27] +",
"= get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names = [] bot_teams = []",
"check for an error code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output =",
"bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate all process and",
"gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX =",
"server_uploader) bm.run() def main(): # Set up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) #",
"'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager =",
"bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting",
"31 characters and handle duplicates def get_sanitized_bot_name(dict, name): if name not in dict:",
"Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant Loadout' BOT_CONFIG_LOADOUT_ORANGE_HEADER = 'Participant Loadout Orange'",
"new_name = name[:27] + \"(\" + str(count + 1) + \")\" # Truncate",
"server_manager.set_player_username(framework_config.get(USER_CONFIGURATION_HEADER, 'username')) except Exception as e: print('username not set in config', e) print('using",
"True, username='unknown') except ImportError: server_manager = ServerConverter('', False, False, False) print('config.py not present,",
"username='unknown') except ImportError: server_manager = ServerConverter('', False, False, False) print('config.py not present, cannot",
"get_bot_config_file_list(botCount, config): config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i)))",
"str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName = get_sanitized_bot_name(name_dict,",
"config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty lists bot_names = []",
"'replays') game_name = str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000)) if save_data:",
"bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') #",
"else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the hash in the config",
"\"(\" + str(count + 1) + \")\" # Truncate at 27 because we",
"= bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID = bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID =",
"BOT_CONFIG_AGENT_HEADER = 'Bot Parameters' try: server_manager = ServerConverter('http://saltie.tk:5000', True, True, True, username='unknown') except",
"name not in dict: new_name = name[:31] # Make sure name does not",
"'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id')",
"server') def get_bot_config_file_list(botCount, config): config_file_list = [] for i in range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX",
"ImportError: server_manager = ServerConverter('', False, False, False) print('config.py not present, cannot upload replays",
"+ str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i gameInputPacket.sPlayerConfiguration[i].wName =",
"event quit_event = mp.Event() # Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled:",
"callback in callbacks: if not callback.is_set(): terminated = False if __name__ == '__main__':",
"# Create Quit event quit_event = mp.Event() # Launch processes for i in",
"parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module'))",
"# Create empty lists bot_names = [] bot_teams = [] bot_modules = []",
"bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError == 0: # Terminate all process",
"configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire GameInputPacket and map buffer",
"+= 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].bRLBotControlled = framework_config.getboolean( PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX",
"downloads the model based on the hash in the config try: server_manager.load_model(bot_config[BOT_CONFIG_AGENT_HEADER]['model_hash']) except",
"joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' + game_name +",
"to terminate before terminating main process terminated = False while not terminated: terminated",
"bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot =",
"else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER",
"not exceed 31 characters dict[name] = 1 else: count = dict[name] new_name =",
"configparser import ctypes import io import mmap import msvcrt import multiprocessing as mp",
"appended dict[name] = count + 1 return new_name def run_agent(terminate_event, callback_event, config_file, name,",
"import mmap import msvcrt import multiprocessing as mp import os import sys import",
"import io import mmap import msvcrt import multiprocessing as mp import os import",
"Discord server for information') if server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount,",
"str(count + 1) + \")\" # Truncate at 27 because we can have",
"gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID = bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID",
"'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the model based on the hash in the",
"name_dict = dict() save_data = True save_path = os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name",
"'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX =",
"print('using default username') # Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) #",
"as e: bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER,",
"for bots and store name and team for i in range(num_participants): bot_config_path =",
"bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured",
"Orange' BOT_CONFIG_MODULE_HEADER = 'Bot Location' USER_CONFIGURATION_HEADER = 'User Info' BOT_CONFIG_AGENT_HEADER = 'Bot Parameters'",
"e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event",
"bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i))) if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: bot_modules.append(bot_config.get(BOT_CONFIG_MODULE_HEADER, 'agent_module')) else: bot_modules.append('NO_MODULE_FOR_PARTICIPANT') # downloads the",
"process and then raise an exception quit_event.set() terminated = False while not terminated:",
"+ str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does",
"Cut off at 31 characters and handle duplicates def get_sanitized_bot_name(dict, name): if name",
"'\\\\' + game_name, save_data, server_manager)) process.start() print(\"Successfully configured bots. Setting flag for injected",
"sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else:",
"bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num ==",
"for callback in callbacks: if not callback.is_set(): terminated = False if __name__ ==",
"(team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header = BOT_CONFIG_LOADOUT_ORANGE_HEADER if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0",
"+ '-' + str(random.randint(0, 1000)) if save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) +",
"characters dict[name] = 1 else: count = dict[name] new_name = name[:27] + \"(\"",
"Exception as e: print (\"Couldn't get model hash,\", e) server_manager.set_player_amount(num_participants, num_team_0) # Create",
"print(\"Shutting Down\") quit_event.set() # Wait for all processes to terminate before terminating main",
"gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if",
"RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG = 'Local\\\\RLBotInput' BOT_CONFIG_LOADOUT_HEADER = 'Participant",
"= mp.Event() # Launch processes for i in range(num_participants): if gameInputPacket.sPlayerConfiguration[i].bRLBotControlled: callback =",
"configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch = True # Wait 100",
"= False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set()",
"buffer buff = mmap.mmap(-1, ctypes.sizeof(bi.GameInputPacket), INPUT_SHARED_MEMORY_TAG) gameInputPacket = bi.GameInputPacket.from_buffer(buff) # Determine number of",
"i in range(num_participants): bot_config_path = participant_configs[i] sys.path.append(os.path.dirname(bot_config_path)) bot_config = configparser.RawConfigParser() if server_manager.download_config: if",
"gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except",
"PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE",
"True for callback in callbacks: if not callback.is_set(): terminated = False raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError)",
"1 else: count = dict[name] new_name = name[:27] + \"(\" + str(count +",
"PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER",
"[] processes = [] callbacks = [] bot_parameter_list = [] name_dict = dict()",
"# Set configuration values for bots and store name and team for i",
"'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_' RLBOT_CONFIG_FILE = 'rlbot.cfg' RLBOT_CONFIGURATION_HEADER = 'RLBot Configuration' INPUT_SHARED_MEMORY_TAG =",
"= name[:27] + \"(\" + str(count + 1) + \")\" # Truncate at",
"import bot_manager import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER",
"in config', e) print('using default username') # Retrieve bot config files participant_configs =",
"if gameInputPacket.sPlayerConfiguration[i].ucTeam == 0: num_team_0 += 1 gameInputPacket.sPlayerConfiguration[i].bBot = framework_config.getboolean(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_KEY_PREFIX + str(i))",
"PARTICPANT_TEAM_PREFIX + str(i)) loadout_header = BOT_CONFIG_LOADOUT_HEADER if (team_num == 1 and bot_config.has_section(BOT_CONFIG_LOADOUT_ORANGE_HEADER)): loadout_header",
"save_data: print(save_path) if not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path)",
"== 0: # Terminate all process and then raise an exception quit_event.set() terminated",
"print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if",
"because we can have up to '(10)' appended dict[name] = count + 1",
"creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path) print('gameName: ' +",
"bot_parameter_list.append(None) print('failed to load bot parameters') else: bot_parameter_list.append(None) bot_names.append(bot_config.get(loadout_header, 'name')) bot_teams.append(framework_config.getint(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_TEAM_PREFIX +",
"lists bot_names = [] bot_teams = [] bot_modules = [] processes = []",
"'name')) gameInputPacket.sPlayerConfiguration[i].ucTeam = team_num gameInputPacket.sPlayerConfiguration[i].ucTeamColorID = bot_config.getint(loadout_header, 'team_color_id') gameInputPacket.sPlayerConfiguration[i].ucCustomColorID = bot_config.getint(loadout_header, 'custom_color_id') gameInputPacket.sPlayerConfiguration[i].iCarID",
"rlbot_exception from bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX",
"not os.path.exists(save_path): print(os.path.dirname(save_path) + ' does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path,",
"index, module_name, game_name, save_data, server_uploader): bm = bot_manager.BotManager(terminate_event, callback_event, config_file, name, team, index,",
"0: # Terminate all process and then raise an exception quit_event.set() terminated =",
"bi import bot_manager import game_data_struct as gd import rlbot_exception from bot_code.conversions.server_converter import ServerConverter",
"an exception quit_event.set() terminated = False while not terminated: terminated = True for",
"bot_config.getint(loadout_header, 'boost_id') gameInputPacket.sPlayerConfiguration[i].iAntennaID = bot_config.getint(loadout_header, 'antenna_id') gameInputPacket.sPlayerConfiguration[i].iHatID = bot_config.getint(loadout_header, 'hat_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header,",
"# Open anonymous shared memory for entire GameInputPacket and map buffer buff =",
"bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to load bot parameters')",
"up RLBot.cfg framework_config = configparser.RawConfigParser() framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire",
"= bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID = bot_config.getint(loadout_header, 'trails_id') gameInputPacket.sPlayerConfiguration[i].iGoalExplosionID = bot_config.getint(loadout_header, 'goal_explosion_id') if bot_config.has_section(BOT_CONFIG_AGENT_HEADER):",
"args=(quit_event, callback, bot_parameter_list[i], str(gameInputPacket.sPlayerConfiguration[i].wName), bot_teams[i], i, bot_modules[i], save_path + '\\\\' + game_name, save_data,",
"for information') if server_manager.error: server_manager.warn_server('unable to connect to server') def get_bot_config_file_list(botCount, config): config_file_list",
"= True for callback in callbacks: if not callback.is_set(): terminated = False raise",
"e) server_manager.set_player_amount(num_participants, num_team_0) # Create Quit event quit_event = mp.Event() # Launch processes",
"name does not exceed 31 characters dict[name] = 1 else: count = dict[name]",
"if 'saltie' in os.path.basename(bot_config_path): bot_config._read(io.StringIO(server_manager.config_response.json()['content']), 'saltie.cfg') else: bot_config.read(bot_config_path) else: bot_config.read(bot_config_path) team_num = framework_config.getint(PARTICPANT_CONFIGURATION_HEADER,",
"import msvcrt import multiprocessing as mp import os import sys import random import",
"framework_config.read(RLBOT_CONFIG_FILE) # Open anonymous shared memory for entire GameInputPacket and map buffer buff",
"= bot_config.getint(loadout_header, 'car_id') gameInputPacket.sPlayerConfiguration[i].iDecalID = bot_config.getint(loadout_header, 'decal_id') gameInputPacket.sPlayerConfiguration[i].iWheelsID = bot_config.getint(loadout_header, 'wheels_id') gameInputPacket.sPlayerConfiguration[i].iBoostID =",
"code time.sleep(0.1) game_data_shared_memory = mmap.mmap(-1, ctypes.sizeof(gd.GameTickPacketWithLock), bot_manager.OUTPUT_SHARED_MEMORY_TAG) bot_output = gd.GameTickPacketWithLock.from_buffer(game_data_shared_memory) if not bot_output.iLastError",
"# Make sure name does not exceed 31 characters dict[name] = 1 else:",
"range(botCount): config_file_list.append(config.get(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_CONFIG_KEY_PREFIX + str(i))) return config_file_list # Cut off at 31 characters",
"+ game_name + 'in ' + save_path) gameInputPacket.iNumPlayers = num_participants server_manager.load_config() num_team_0 =",
"print('config.py not present, cannot upload replays to collective server') print('Check Discord server for",
"index, module_name, game_name, save_data, server_uploader) bm.run() def main(): # Set up RLBot.cfg framework_config",
"bot_code.conversions.server_converter import ServerConverter PARTICPANT_CONFIGURATION_HEADER = 'Participant Configuration' PARTICPANT_BOT_KEY_PREFIX = 'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_'",
"process.start() print(\"Successfully configured bots. Setting flag for injected dll.\") gameInputPacket.bStartMatch = True #",
"= str(int(round(time.time() * 1000))) + '-' + str(random.randint(0, 1000)) if save_data: print(save_path) if",
"= True for callback in callbacks: if not callback.is_set(): terminated = False if",
"import multiprocessing as mp import os import sys import random import time import",
"if bot_config.has_section(BOT_CONFIG_AGENT_HEADER): try: bot_parameter_list.append(bot_config[BOT_CONFIG_AGENT_HEADER]) except Exception as e: bot_parameter_list.append(None) print('failed to load bot",
"= [] bot_parameter_list = [] name_dict = dict() save_data = True save_path =",
"PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_RLBOT_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].fBotSkill = framework_config.getfloat(PARTICPANT_CONFIGURATION_HEADER, PARTICPANT_BOT_SKILL_KEY_PREFIX + str(i)) gameInputPacket.sPlayerConfiguration[i].iPlayerIndex = i",
"at 27 because we can have up to '(10)' appended dict[name] = count",
"os.path.join(os.getcwd(), 'bot_code', 'training', 'replays') game_name = str(int(round(time.time() * 1000))) + '-' + str(random.randint(0,",
"# Wait for all processes to terminate before terminating main process terminated =",
"does not exist creating') os.makedirs(save_path) joined_path = os.path.join(save_path, game_name) if not os.path.exists(joined_path): os.makedirs(joined_path)",
"raise rlbot_exception.RLBotException().raise_exception_from_error_code(bot_output.iLastError) print(\"Press any character to exit\") msvcrt.getch() print(\"Shutting Down\") quit_event.set() # Wait",
"gameInputPacket.sPlayerConfiguration[i].iPaintFinish1ID = bot_config.getint(loadout_header, 'paint_finish_1_id') gameInputPacket.sPlayerConfiguration[i].iPaintFinish2ID = bot_config.getint(loadout_header, 'paint_finish_2_id') gameInputPacket.sPlayerConfiguration[i].iEngineAudioID = bot_config.getint(loadout_header, 'engine_audio_id') gameInputPacket.sPlayerConfiguration[i].iTrailsID",
"'participant_is_bot_' PARTICPANT_RLBOT_KEY_PREFIX = 'participant_is_rlbot_controlled_' PARTICPANT_CONFIG_KEY_PREFIX = 'participant_config_' PARTICPANT_BOT_SKILL_KEY_PREFIX = 'participant_bot_skill_' PARTICPANT_TEAM_PREFIX = 'participant_team_'",
"username') # Retrieve bot config files participant_configs = get_bot_config_file_list(num_participants, framework_config) # Create empty"
] |
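The runner talks to the injected dll purely through named shared memory: both sides mmap the same tag and overlay a ctypes struct on the shared bytes. A minimal sketch of that pattern, assuming Windows (mmap tag names are Windows-only); the struct and its fields are stand-ins, not the real bi.GameInputPacket layout:

import ctypes
import mmap


class DemoPacket(ctypes.Structure):
    # hypothetical stand-in for bi.GameInputPacket
    _fields_ = [('bStartMatch', ctypes.c_bool),
                ('iNumPlayers', ctypes.c_int)]


# Both processes that open this tag see the same bytes.
buff = mmap.mmap(-1, ctypes.sizeof(DemoPacket), 'Local\\DemoInput')
packet = DemoPacket.from_buffer(buff)  # ctypes view directly over the shared buffer

packet.iNumPlayers = 4
packet.bStartMatch = True  # the reader on the other side would poll for this flag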
# FER facial-expression dataset: an ImageFolder rooted at data/processed/FER<num_classes>.
import torchvision

from ..paths import PATH_DATA_PROCESSED


class FER(torchvision.datasets.ImageFolder):
    def __init__(self, num_classes: int, **kwargs):
        kwargs['root'] = PATH_DATA_PROCESSED.joinpath(f'FER{num_classes}').as_posix()
        super().__init__(**kwargs)
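Since FER only pins ImageFolder's root, it composes with the standard torchvision plumbing. A usage sketch, assuming a processed FER7 directory already exists under PATH_DATA_PROCESSED; the transform and batch size are illustrative, not taken from the source:

import torch
from torchvision import transforms

dataset = FER(num_classes=7, transform=transforms.ToTensor())
loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)

image, label = dataset[0]  # a (C, H, W) float tensor and an integer class index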
#
# @lc app=leetcode id=16 lang=python3
#
# [16] 3Sum Closest
#

# @lc code=start
from typing import List


class Solution:
    def threeSumClosest(self, nums: List[int], target: int) -> int:
        ans = 0x7fffffff  # sentinel: farther from any target than a real sum
        sz = len(nums)
        nums.sort()
        for i in range(sz - 2):
            # skip duplicate anchors so identical triples are not re-scanned
            if i > 0 and nums[i] == nums[i - 1]:
                continue
            p = i + 1
            q = sz - 1
            while p < q:
                tmp = nums[i] + nums[p] + nums[q]
                if abs(tmp - target) < abs(ans - target):
                    ans = tmp
                if tmp == target:
                    break
                elif tmp > target:
                    q -= 1
                else:
                    p += 1
            if ans == target:
                break
        return ans
# @lc code=end
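A quick sanity check of the solution on the canonical LeetCode example for problem 16:

s = Solution()
print(s.threeSumClosest([-1, 2, 1, -4], 1))  # 2, since -1 + 2 + 1 is the sum closest to target 1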
# Package __init__: public API re-exports.
from ._dataset import DataSet
from ._datastore import DataStore
from ._environment import Environment
from ._project import Project
from ._secret_store import SecretStore
from ._spark import _Spark, Spark
from ._type_mapping import TypeMapping
from ._exceptions import *

__all__ = [
    "DataSet",
    "DataStore",
    "Environment",
    "Project",
    "SecretStore",
    "_Spark",
    "Spark",
    "TypeMapping",
    "ProjectVersionInvalid",
    "ProjectDirectoryNotSet",
    "ProjectDirectoryNotExists",
]
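One consequence of this __all__ worth noting: a star-import re-exports exactly the listed names, including the underscore-prefixed _Spark that `import *` would otherwise skip. A sketch, assuming the package is importable as `mypkg` (hypothetical name):

from mypkg import *  # pulls in exactly the names listed in __all__

print(DataSet, DataStore, Environment)  # re-exported classes
print('_Spark' in dir())                # True: __all__ overrides the default underscore filtering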
# cogs/onload.py
import discord
import os
from discord.errors import ClientException
import dotenv
import logging
import asyncio
from discord.ext import commands, tasks
from dotenv import load_dotenv
from itertools import cycle

client = discord.Client()  # note: this module-level client is never used; the bot loading the cog is passed to setup()


class Onload(commands.Cog):
    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        print('Bot is online.')


def setup(client):
    client.add_cog(Onload(client))
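The `setup(client)` hook is what discord.py's extension loader calls, so wiring this cog into a bot looks like the sketch below (discord.py 1.x style, matching the synchronous setup above; prefix and token are placeholders):

from discord.ext import commands

bot = commands.Bot(command_prefix='!')
bot.load_extension('cogs.onload')  # imports cogs/onload.py and calls its setup(bot)
# bot.run('BOT_TOKEN')  # placeholder token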
# Tests for DetectorRISE (D-RISE detector-proposal saliency scoring).
from unittest import TestCase

import numpy as np
import os

from xaitk_saliency.impls.gen_detector_prop_sal.drise_scoring import DetectorRISE
from xaitk_saliency import GenerateDetectorProposalSaliency
from smqtk_core.configuration import configuration_test_helper

from tests import DATA_DIR, EXPECTED_MASKS_4x6


class TestSimilarityScoring(TestCase):

    def test_init_(self) -> None:
        """
        Test if implementation is usable.
        """
        impl = DetectorRISE()
        assert impl.is_usable() and isinstance(impl, GenerateDetectorProposalSaliency)

    def test_default_param(self) -> None:
        """
        Test default construction.
        """
        impl = DetectorRISE()
        assert impl.proximity_metric == 'cosine'

    def test_get_config(self) -> None:
        """
        Test expected configuration behavior.
        """
        impl = DetectorRISE('euclidean')
        for i in configuration_test_helper(impl):
            assert i.proximity_metric == 'euclidean'

    def test_metric_args(self) -> None:
        """
        Test non-default metric type.
        """
        impl = DetectorRISE('hamming')
        assert impl.proximity_metric == 'hamming'

    def test_shape_sanity(self) -> None:
        """
        Test basic scoring with a single feature for broadcasting sanity check.
        """
        impl = DetectorRISE()
        np.random.seed(2)
        image1_dets = np.random.rand(2, 7)
        pertb_dets = np.random.rand(10, 2, 7)
        pertb_mask = np.random.randint(low=0, high=2, size=(10, 15, 25), dtype='int')
        sal = impl.generate(image1_dets, pertb_dets, pertb_mask)
        assert sal.shape == (2, 15, 25)

    def test_standard_detection(self) -> None:
        """
        Test basic scoring on known values and non-square masks.
        """
        impl = DetectorRISE()
        image1_dets = np.array([[1, 1, 4, 3, 0, 1, 0.89]])
        pertb_dets = np.array([
            [[1, 2, 6, 6, 0.3, 1, 0.995]],
            [[0, 1, 2, 2, 0.2, 2, 0.03]],
            [[1, 0, 2, 2, 0.45, 1, 0.81]],
            [[1, 1, 6, 6, 0.5, 1, 0.625]],
            [[0, 2, 3, 5, 0.03, 1, 0.56]],
            [[1, 2, 6, 3, 0.01, 1, 0.07]],
        ])
        sal = impl.generate(image1_dets, pertb_dets, EXPECTED_MASKS_4x6)
        standard_sal = np.load(os.path.join(DATA_DIR, 'drisesal.npy'))
        assert sal.shape == (1, 4, 6)
        assert np.allclose(standard_sal, sal)
# Football 1X2 hedging: filter the stake splits in cmb_list.csv down to those that
# stay profitable on the two hedged outcomes, then return the split with the lowest
# break-even odds on the kept side.
import pandas as pd  # assumed import: pd.read_csv is used below but the import line falls outside this excerpt
import numpy as np
import os as os


def GetLverage(v_win, v_tie, v_los, bet_amt, keep_side):
    # Column names are Chinese: 胜 = win stake, 平 = draw stake, 负 = loss stake
    # (percentages of bet_amt); 胜入/平入/负入 = payout if win/draw/loss;
    # 总出 = total outlay; 胜利/平利/负利 = net profit per outcome;
    # 最小胜/最小平/最小负 = break-even odds for the kept side; 杠杆差 = leverage gap.
    # keep_side is one of '留胜' (keep win), '留平' (keep draw), '留负' (keep loss).
    AllocateList = pd.read_csv(r'./data/cmb_list.csv', index_col=None)
    AllocateList['胜'] = (AllocateList['胜'] * bet_amt) / 100
    AllocateList['平'] = (AllocateList['平'] * bet_amt) / 100
    AllocateList['负'] = (AllocateList['负'] * bet_amt) / 100
    AllocateList['胜入'] = AllocateList['胜'] * (v_win + 1)
    AllocateList['平入'] = AllocateList['平'] * (v_tie + 1)
    AllocateList['负入'] = AllocateList['负'] * (v_los + 1)
    AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负']
    AllocateList['胜利'] = AllocateList['胜入'] - AllocateList['总出']
    AllocateList['平利'] = AllocateList['平入'] - AllocateList['总出']
    AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出']
    if keep_side == '留胜':
        OPTAllocateList = AllocateList.loc[(AllocateList['平利'] > 0) & (AllocateList['负利'] > 0), ].copy()  # .copy() avoids SettingWithCopyWarning
        OPTAllocateList['最小胜'] = (OPTAllocateList['总出'] / OPTAllocateList['胜']) - 1
        OPTAllocateList['杠杆差'] = OPTAllocateList['最小胜'] - v_win
        return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(), ]
    elif keep_side == '留平':
        OPTAllocateList = AllocateList.loc[(AllocateList['胜利'] > 0) & (AllocateList['负利'] > 0), ].copy()
        OPTAllocateList['最小平'] = (OPTAllocateList['总出'] / OPTAllocateList['平']) - 1
        OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] - v_tie
        return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(), ]
    elif keep_side == '留负':
        OPTAllocateList = AllocateList.loc[(AllocateList['平利'] > 0) & (AllocateList['胜利'] > 0), ].copy()
        OPTAllocateList['最小负'] = (OPTAllocateList['总出'] / OPTAllocateList['负']) - 1
        OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los
        return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(), ]


# 波黑-波兰 (Bosnia and Herzegovina vs. Poland)
# NOTE: these calls use the name GetArbitrage while the function above is defined as
# GetLverage; GetArbitrage is presumably defined elsewhere in the original file.
GetArbitrage(1.55, 2.125, 1.875, 100, '留胜')  # 2.125
GetArbitrage(1.55, 2.125, 1.875, 100, '留平')  # 3.0
"AllocateList['胜入'] = AllocateList['胜']*(v_win+1) AllocateList['平入'] = AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜'] +",
"OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),] # 波黑-波兰 GetArbitrage(1.55,2.125,1.875,100,'留胜') # 2.125 GetArbitrage(1.55,2.125,1.875,100,'留平') # 3.0 GetArbitrage(1.55,2.125,1.875,100,'留负') #",
"1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小胜'] - v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side ==",
"OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负'])",
"np import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100",
"AllocateList['负入'] - AllocateList['总出'] if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小胜']",
"- 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小胜'] - v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side",
"= OPTAllocateList['最小平'] - v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList",
"as pd import numpy as np import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList",
"AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负'] AllocateList['胜利'] =",
"AllocateList['负'] AllocateList['胜利'] = AllocateList['胜入'] - AllocateList['总出'] AllocateList['平利'] = AllocateList['平入'] - AllocateList['总出'] AllocateList['负利'] =",
"numpy as np import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜']",
"(AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负']",
"- v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side == '留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0)",
"AllocateList['负'] = (AllocateList['负']*bet_amt)/100 AllocateList['胜入'] = AllocateList['胜']*(v_win+1) AllocateList['平入'] = AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出']",
"= AllocateList['负入'] - AllocateList['总出'] if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),]",
"AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los",
"+ AllocateList['负'] AllocateList['胜利'] = AllocateList['胜入'] - AllocateList['总出'] AllocateList['平利'] = AllocateList['平入'] - AllocateList['总出'] AllocateList['负利']",
"keep_side == '留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1",
"AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] - v_tie",
"AllocateList['总出'] AllocateList['平利'] = AllocateList['平入'] - AllocateList['总出'] AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出'] if keep_side",
"pandas as pd import numpy as np import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side):",
"v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) &",
"pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100 AllocateList['平'] = (AllocateList['平']*bet_amt)/100 AllocateList['负'] = (AllocateList['负']*bet_amt)/100 AllocateList['胜入'] = AllocateList['胜']*(v_win+1)",
"OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] - v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] ==",
"'留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差'] =",
"== OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] =",
"= AllocateList['平入'] - AllocateList['总出'] AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出'] if keep_side == '留胜':",
"import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100 AllocateList['平']",
"= AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] -",
"(OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),] #",
"pd import numpy as np import os as os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList =",
"- 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] - v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side",
"OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负'] ==",
"= (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小平'] - v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),]",
"OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负']",
"- v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0)",
"keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1",
"(OPTAllocateList['总出']/OPTAllocateList['胜']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小胜'] - v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif",
"<filename>AA_FOOTBALL/GuessLeverage.py import pandas as pd import numpy as np import os as os",
"OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负']",
"= AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] -",
"import pandas as pd import numpy as np import os as os def",
"AllocateList['平入'] - AllocateList['总出'] AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出'] if keep_side == '留胜': OPTAllocateList",
"- 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),] # 波黑-波兰",
"AllocateList['总出'] if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小胜'] = (OPTAllocateList['总出']/OPTAllocateList['胜'])",
"if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小胜'] = (OPTAllocateList['总出']/OPTAllocateList['胜']) -",
"(AllocateList['负']*bet_amt)/100 AllocateList['胜入'] = AllocateList['胜']*(v_win+1) AllocateList['平入'] = AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜']",
"OPTAllocateList['最小胜'] - v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side == '留平': OPTAllocateList =",
"OPTAllocateList['杠杆差'] = OPTAllocateList['最小胜'] - v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side == '留平':",
"def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100 AllocateList['平'] = (AllocateList['平']*bet_amt)/100 AllocateList['负'] =",
"AllocateList['胜入'] - AllocateList['总出'] AllocateList['平利'] = AllocateList['平入'] - AllocateList['总出'] AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出']",
"== OPTAllocateList['最小负'].min(),] # 波黑-波兰 GetArbitrage(1.55,2.125,1.875,100,'留胜') # 2.125 GetArbitrage(1.55,2.125,1.875,100,'留平') # 3.0 GetArbitrage(1.55,2.125,1.875,100,'留负') # 2.7",
"OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),] # 波黑-波兰 GetArbitrage(1.55,2.125,1.875,100,'留胜') # 2.125 GetArbitrage(1.55,2.125,1.875,100,'留平')",
"= (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),]",
"return OPTAllocateList.loc[OPTAllocateList['最小负'] == OPTAllocateList['最小负'].min(),] # 波黑-波兰 GetArbitrage(1.55,2.125,1.875,100,'留胜') # 2.125 GetArbitrage(1.55,2.125,1.875,100,'留平') # 3.0 GetArbitrage(1.55,2.125,1.875,100,'留负')",
"AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负'] AllocateList['胜利'] = AllocateList['胜入']",
"= (AllocateList['负']*bet_amt)/100 AllocateList['胜入'] = AllocateList['胜']*(v_win+1) AllocateList['平入'] = AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] =",
"OPTAllocateList['最小胜'].min(),] elif keep_side == '留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平'])",
"keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小胜'] = (OPTAllocateList['总出']/OPTAllocateList['胜']) - 1",
"AllocateList['平'] = (AllocateList['平']*bet_amt)/100 AllocateList['负'] = (AllocateList['负']*bet_amt)/100 AllocateList['胜入'] = AllocateList['胜']*(v_win+1) AllocateList['平入'] = AllocateList['平']*(v_tie+1) AllocateList['负入']",
"& (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] = OPTAllocateList['最小负'] - v_los return",
"AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负'] AllocateList['胜利'] = AllocateList['胜入'] - AllocateList['总出']",
"AllocateList['总出'] AllocateList['负利'] = AllocateList['负入'] - AllocateList['总出'] if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0)",
"GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100 AllocateList['平'] = (AllocateList['平']*bet_amt)/100 AllocateList['负'] = (AllocateList['负']*bet_amt)/100",
"= AllocateList['平']*(v_tie+1) AllocateList['负入'] = AllocateList['负']*(v_los+1) AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负'] AllocateList['胜利']",
"os def GetLverage(v_win,v_tie,v_los,bet_amt,keep_side): AllocateList = pd.read_csv(r'./data/cmb_list.csv',index_col=None) AllocateList['胜'] = (AllocateList['胜']*bet_amt)/100 AllocateList['平'] = (AllocateList['平']*bet_amt)/100 AllocateList['负']",
"AllocateList['总出'] = AllocateList['胜'] + AllocateList['平'] + AllocateList['负'] AllocateList['胜利'] = AllocateList['胜入'] - AllocateList['总出'] AllocateList['平利']",
"v_win return OPTAllocateList.loc[OPTAllocateList['最小胜'] == OPTAllocateList['最小胜'].min(),] elif keep_side == '留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0) &",
"== '留平': OPTAllocateList = AllocateList.loc[(AllocateList['胜利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小平'] = (OPTAllocateList['总出']/OPTAllocateList['平']) - 1 OPTAllocateList['杠杆差']",
"- AllocateList['总出'] if keep_side == '留胜': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['负利']>0),] OPTAllocateList['最小胜'] =",
"'留负': OPTAllocateList = AllocateList.loc[(AllocateList['平利']>0) & (AllocateList['胜利']>0),] OPTAllocateList['最小负'] = (OPTAllocateList['总出']/OPTAllocateList['负']) - 1 OPTAllocateList['杠杆差'] =",
"OPTAllocateList['最小平'] - v_tie return OPTAllocateList.loc[OPTAllocateList['最小平'] == OPTAllocateList['最小平'].min(),] elif keep_side == '留负': OPTAllocateList ="
] |
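# A minimal worked example of the arithmetic above, using a hypothetical
# 40/30/30 allocation rather than a row from cmb_list.csv, at the same net
# odds 1.55/2.125/1.875 and a 100-unit bet:
win_stake, tie_stake, los_stake = 40.0, 30.0, 30.0
v_win, v_tie, v_los = 1.55, 2.125, 1.875
total_out = win_stake + tie_stake + los_stake      # 总出 = 100.0
win_in = win_stake * (v_win + 1)                   # 胜入 = 102.0
tie_profit = tie_stake * (v_tie + 1) - total_out   # 平利 = -6.25 (not hedged)
min_win = total_out / win_stake - 1                # 最小胜 = 1.5 (break-even odds)
# This allocation fails the '留胜' filter because the draw leg loses money;
# GetLeverage scans cmb_list.csv for rows where both hedged legs profit and
# the break-even odds of the kept side are lowest.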
class Solution:
    """
    @param s: A string
    @param p: A string that may include "." and "*"
    @return: A boolean
    @ '.' matches any single character; '*' matches zero or more of the
      preceding element.
    @ corner case ??
    @ Time | Space
    """
    def isMatch(self, s, p):
        # 2-D DP table with len(p) + 1 rows and len(s) + 1 columns;
        # dp[i][j] is True when p[:i] matches s[:j]
        dp = [[False] * (len(s) + 1) for _ in range(len(p) + 1)]
        dp[0][0] = True
        # the empty string is matched by prefixes like "a*", "a*b*", ...
        for i in range(1, len(p)):
            dp[i + 1][0] = dp[i - 1][0] and p[i] == '*'
        for i in range(len(p)):
            for j in range(len(s)):
                if p[i] == '*':
                    # '*' consumes zero copies of the preceding element ...
                    dp[i + 1][j + 1] = dp[i - 1][j + 1] or dp[i][j + 1]
                    # ... or one more copy, when that element matches s[j]
                    if p[i - 1] == s[j] or p[i - 1] == '.':
                        dp[i + 1][j + 1] |= dp[i + 1][j]
                else:
                    dp[i + 1][j + 1] = dp[i][j] and (p[i] == s[j] or p[i] == '.')
        return dp[-1][-1]
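# Quick sanity checks of the DP above (inputs chosen here for illustration;
# expected results follow the standard regex-matching semantics):
sol = Solution()
print(sol.isMatch("aab", "c*a*b"))               # True: 'c*' -> "", 'a*' -> "aa"
print(sol.isMatch("mississippi", "mis*is*p*."))  # False
print(sol.isMatch("ab", ".*"))                   # True: '.*' matches any string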
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import re
import os
import glob
import imageio
import argparse
import subprocess
import numpy as np
from tqdm import tqdm
from PIL import Image
from pygifsicle import optimize
from obj.arg_formatter import arg_metav_formatter


def sorted_alphanumeric(data):
    """
    Function to sort number-containing strings

    Args:
        data (list): list of strings to sort

    Returns:
        (list): sorted list
    """
    convert = lambda text: int(text) if text.isdigit() else text.lower()
    alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
    return sorted(data, key=alphanum_key)


def make_plot(direct, number_ticks):
    """
    Function to plot values from log csv file

    Args:
        direct (str): base directory of logged model
        number_ticks (int): number of ticks to have on graph
    """
    direct = re.sub(r"(\/)?$", "", direct)
    direct = re.sub(r"(\.\/)?pickles\/", "", direct)
    directLong = "./pickles/" + direct
    if not os.path.isdir(directLong):
        sys.exit(directLong + " does not exist")
    # make vis directory within log directory
    os.makedirs(directLong + "/vis", exist_ok=True)
    subprocess.call(
        ["Rscript", "gg.R", "-d", directLong, "-t", str(number_ticks)])


def make_gif(direct, shrink_factor=4, skip_rate=2, interval=0.1, until=None,
             progress_bar=False):
    """
    Function to create gif from images

    Args:
        direct (str): base directory of logged model
        shrink_factor (int): factor by which to downsample images
        skip_rate (int): interval at which images are used for the gif
        interval (float): time interval between gif frames, i.e. speed
        until (int): upper limit for epoch to be used in gif construction
        progress_bar (bool): True if progress bar should be added to gif
    """
    print("creating training evolution gif")
    # clean up directory input
    direct = re.sub(r"(\/)?$", "", direct)
    direct = re.sub(r"(\.\/)?pickles\/", "", direct)
    directLong = "./pickles/" + direct
    if not os.path.isdir(directLong):
        sys.exit(directLong + " does not exist")
    # get sorted image list
    sorted_list = sorted_alphanumeric(glob.glob(directLong + "/img/*png"))
    # assume all images are of same size
    size = Image.open(sorted_list[0]).size
    new_size = tuple([int(el / shrink_factor) for el in size])
    if isinstance(until, int):
        sorted_list = sorted_list[:until]
    # downsample and subsample frames; Image.ANTIALIAS is the LANCZOS filter
    # (renamed Image.LANCZOS in newer Pillow releases)
    sorted_list = [
        Image.open(img).resize(new_size, Image.ANTIALIAS)
        for i, img in enumerate(tqdm(sorted_list))
        if ((i + 1) % skip_rate == 0 or i == 0)
    ]
    kargs = {'duration': interval}
    imageio.mimsave(directLong + "/vis/vis.gif", sorted_list, **kargs)
    optimize(directLong + "/vis/vis.gif", directLong + "/vis/vis.gif")
    if progress_bar:
        print("adding progress bar to gif")
        output = subprocess.call("cat " + directLong + "/vis/vis.gif" +
                                 " | gif-progress --bar-color '#000'" +
                                 " > " + directLong + "/vis/out.gif",
                                 shell=True)
        if output != 0:
            sys.exit("error occurred with gif progress bar, do manual check")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter)
    required = parser.add_argument_group("required name arguments")
    required.add_argument("--log-dir", type=str, required=True,
                          help="base directory within pickles from which to" +
                          " visualize")
    parser.add_argument("--number-ticks", type=int, default=10,
                        help="number of x-axis ticks to use in main plots")
    parser.add_argument("--create-gif", default=False, action="store_true",
                        help="option to activate gif creation")
    parser.add_argument("--shrink-factor", type=int, default=4,
                        help="shrinking factor for images, applies only" +
                        " when --create-gif is supplied")
    parser.add_argument("--skip-rate", type=int, default=2,
                        help="skip interval when using images to construct" +
                        " gif, applies only when --create-gif is supplied")
    parser.add_argument("--interval", type=float, default=0.1,
                        help="time interval when constructing gifs from" +
                        " images, applies only when --create-gif is supplied")
    parser.add_argument("--until", type=int, default=None,
                        help="set upper epoch limit for gif creation," +
                        " applies only when --create-gif is supplied")
    parser.add_argument("--progress-bar", default=False, action="store_true",
                        help="option to add progress bar to gifs, applies" +
                        " only when --create-gif is supplied; check readme" +
                        " for additional go package installation instructions")
    args = parser.parse_args()
    # make plot
    make_plot(args.log_dir, args.number_ticks)
    # if necessary, make gif
    if args.create_gif:
        make_gif(args.log_dir, args.shrink_factor, args.skip_rate,
                 args.interval, args.until, args.progress_bar)
"sorted_alphanumeric(glob.glob(directLong + \"/img/*png\")) # assume all images are of same size size =",
"import tqdm from PIL import Image from pygifsicle import optimize from obj.arg_formatter import",
"\"/vis/vis.gif\", sorted_list, **kargs) optimize(directLong + \"/vis/vis.gif\", directLong + \"/vis/vis.gif\") if progress_bar: print(\"adding progress",
"direct if not os.path.isdir(directLong): sys.exit(directLong + \" does not exist\") # get sorted",
"Image from pygifsicle import optimize from obj.arg_formatter import arg_metav_formatter def sorted_alphanumeric(data): \"\"\" Function",
"skip_rate=2, interval=0.1, until=None, progress_bar=False): \"\"\" Function to create gif from images Args: direct",
"for gif interval (float): temporal interval for gif construction or speed until (int):",
"go package installation instructions\") args = parser.parse_args() # make plot make_plot(args.log_dir, args.number_ticks) #",
"directory input direct = re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\", \"\", direct) directLong",
"number_ticks (int): number of ticks to have on graph \"\"\" direct = re.sub(r\"(\\/)?$\",",
"to use for gif interval (float): temporal interval for gif construction or speed",
"--create-gif is supplied; check readme for\" + \" additional go package installation instructions\")",
"in gif construction progress_bar (bool): True if progress bar should be added to",
"\" + directLong + \"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error occurred with",
"tqdm import tqdm from PIL import Image from pygifsicle import optimize from obj.arg_formatter",
"+ \"/vis/vis.gif\", sorted_list, **kargs) optimize(directLong + \"/vis/vis.gif\", directLong + \"/vis/vis.gif\") if progress_bar: print(\"adding",
"\"\"\" Function to sort number-containing strings Args: data (list): list of strings to",
"Function to sort number-containing strings Args: data (list): list of strings to sort",
"images, applies only\" + \" when --create-gif is supplied\") parser.add_argument(\"--skip-rate\", type=int, default=2, help=\"skip",
"\"\", direct) directLong = \"./pickles/\" + direct if not os.path.isdir(directLong): sys.exit(directLong + \"",
"is supplied\") parser.add_argument(\"--interval\", type=float, default=0.1, help=\"time interval when constructing gifs from\" + \"",
"when --create-gif is supplied; check readme for\" + \" additional go package installation",
"sys import re import os import glob import imageio import argparse import subprocess",
"up directory input direct = re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\", \"\", direct)",
"interval to images to use for gif interval (float): temporal interval for gif",
"default=2, help=\"skip interval when using images to construct\" + \" gif applies only",
"make_plot(args.log_dir, args.number_ticks) # if necessary, make gif if args.create_gif: make_gif(args.log_dir, args.shrink_factor, args.skip_rate, args.interval,",
"sys.exit(directLong + \" does not exist\") # make vis directory within log directory",
"+ 1) % skip_rate == 0 or i == 0) ] kargs =",
"only\" + \" when --create-gif is supplied\") parser.add_argument(\"--skip-rate\", type=int, default=2, help=\"skip interval when",
"+ \"/vis/vis.gif\" + \" | gif-progress --bar-color '#000'\" + \" > \" +",
"obj.arg_formatter import arg_metav_formatter def sorted_alphanumeric(data): \"\"\" Function to sort number-containing strings Args: data",
"enumerate(tqdm(sorted_list)) if ((i + 1) % skip_rate == 0 or i == 0)",
"re import os import glob import imageio import argparse import subprocess import numpy",
"\"-t\", str(number_ticks)]) def make_gif(direct, shrink_factor=4, skip_rate=2, interval=0.1, until=None, progress_bar=False): \"\"\" Function to create",
"> \" + directLong + \"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error occurred",
"is supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch limit for gif creation,\" +",
"interval for gif construction or speed until (int): upper limit for epoch to",
"Image.ANTIALIAS) for i, img in enumerate(tqdm(sorted_list)) if ((i + 1) % skip_rate ==",
"for c in re.split('([0-9]+)', key)] return sorted(data, key=alphanum_key) def make_plot(direct, number_ticks): \"\"\" Function",
"| gif-progress --bar-color '#000'\" + \" > \" + directLong + \"/vis/out.gif\", shell=True)",
"to\" + \" visualize\") parser.add_argument(\"--number-ticks\", type=int, default=10, help=\"number of x-axis ticks to use",
"% skip_rate == 0 or i == 0) ] kargs = {'duration': interval}",
"epoch to be used in gif construction progress_bar (bool): True if progress bar",
"clean up directory input direct = re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\", \"\",",
"if ((i + 1) % skip_rate == 0 or i == 0) ]",
"(int): number of ticks to have on graph \"\"\" direct = re.sub(r\"(\\/)?$\", \"\",",
"all images are of same size size = Image.open(sorted_list[0]).size new_size = tuple([int(el /",
"gif progress bar, do manual check\") if __name__ == \"__main__\": parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter)",
"help=\"number of x-axis ticks to use in main plots\") parser.add_argument(\"--create-gif\", default=False, action=\"store_true\", help=\"option",
"images to use for gif interval (float): temporal interval for gif construction or",
"shrink_factor) for el in size]) if isinstance(until, int): sorted_list = sorted_list[:until] sorted_list =",
"--create-gif is supplied\") parser.add_argument(\"--interval\", type=float, default=0.1, help=\"time interval when constructing gifs from\" +",
"progress_bar=False): \"\"\" Function to create gif from images Args: direct (str): base directory",
"as np from tqdm import tqdm from PIL import Image from pygifsicle import",
"args = parser.parse_args() # make plot make_plot(args.log_dir, args.number_ticks) # if necessary, make gif",
"parser.add_argument(\"--skip-rate\", type=int, default=2, help=\"skip interval when using images to construct\" + \" gif",
"plots\") parser.add_argument(\"--create-gif\", default=False, action=\"store_true\", help=\"option to activate gif creation\") parser.add_argument(\"--shrink-factor\", type=int, default=4, help=\"shrinking",
"ticks to use in main plots\") parser.add_argument(\"--create-gif\", default=False, action=\"store_true\", help=\"option to activate gif",
"assume all images are of same size size = Image.open(sorted_list[0]).size new_size = tuple([int(el",
"gif creation\") parser.add_argument(\"--shrink-factor\", type=int, default=4, help=\"shrinking factor for images, applies only\" + \"",
"+ \"/vis\", exist_ok=True) subprocess.call( [\"Rscript\", \"gg.R\", \"-d\", directLong, \"-t\", str(number_ticks)]) def make_gif(direct, shrink_factor=4,",
"\" images, applies only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper",
"+ \" | gif-progress --bar-color '#000'\" + \" > \" + directLong +",
"do manual check\") if __name__ == \"__main__\": parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter) required = parser.add_argument_group(\"required",
"help=\"option to add progress bar to gifs, applies\" + \"only when --create-gif is",
"convert = lambda text: int(text) if text.isdigit() else text.lower() alphanum_key = lambda key:",
"should be added to gif \"\"\" print(\"creating training evolution gif\") # clean up",
"occurred with gif progress bar, do manual check\") if __name__ == \"__main__\": parser",
"argparse import subprocess import numpy as np from tqdm import tqdm from PIL",
"directory of logged model number_ticks (int): number of ticks to have on graph",
"pickles from which to\" + \" visualize\") parser.add_argument(\"--number-ticks\", type=int, default=10, help=\"number of x-axis",
"if text.isdigit() else text.lower() alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)',",
"directory within log directory os.makedirs(directLong + \"/vis\", exist_ok=True) subprocess.call( [\"Rscript\", \"gg.R\", \"-d\", directLong,",
"= lambda key: [convert(c) for c in re.split('([0-9]+)', key)] return sorted(data, key=alphanum_key) def",
"base directory of logged model number_ticks (int): number of ticks to have on",
"tqdm from PIL import Image from pygifsicle import optimize from obj.arg_formatter import arg_metav_formatter",
"size = Image.open(sorted_list[0]).size new_size = tuple([int(el / shrink_factor) for el in size]) if",
"[convert(c) for c in re.split('([0-9]+)', key)] return sorted(data, key=alphanum_key) def make_plot(direct, number_ticks): \"\"\"",
"type=int, default=4, help=\"shrinking factor for images, applies only\" + \" when --create-gif is",
"args.number_ticks) # if necessary, make gif if args.create_gif: make_gif(args.log_dir, args.shrink_factor, args.skip_rate, args.interval, args.until,",
"\"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error occurred with gif progress bar, do",
"\" additional go package installation instructions\") args = parser.parse_args() # make plot make_plot(args.log_dir,",
"type=int, default=10, help=\"number of x-axis ticks to use in main plots\") parser.add_argument(\"--create-gif\", default=False,",
"required=True, help=\"base directory within pickles from which to\" + \" visualize\") parser.add_argument(\"--number-ticks\", type=int,",
"from\" + \" images, applies only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int, default=None,",
"create gif from images Args: direct (str): base directory of logged model shrink_factor",
"directLong, \"-t\", str(number_ticks)]) def make_gif(direct, shrink_factor=4, skip_rate=2, interval=0.1, until=None, progress_bar=False): \"\"\" Function to",
"[ Image.open(img).resize(new_size, Image.ANTIALIAS) for i, img in enumerate(tqdm(sorted_list)) if ((i + 1) %",
"if isinstance(until, int): sorted_list = sorted_list[:until] sorted_list = [ Image.open(img).resize(new_size, Image.ANTIALIAS) for i,",
"of x-axis ticks to use in main plots\") parser.add_argument(\"--create-gif\", default=False, action=\"store_true\", help=\"option to",
"+ directLong + \"/vis/vis.gif\" + \" | gif-progress --bar-color '#000'\" + \" >",
"parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter) required = parser.add_argument_group(\"required name arguments\") required.add_argument(\"--log-dir\", type=str, required=True, help=\"base directory",
"applies\" + \"only when --create-gif is supplied; check readme for\" + \" additional",
"+ direct if not os.path.isdir(directLong): sys.exit(directLong + \" does not exist\") # get",
"or speed until (int): upper limit for epoch to be used in gif",
"\" visualize\") parser.add_argument(\"--number-ticks\", type=int, default=10, help=\"number of x-axis ticks to use in main",
"action=\"store_true\", help=\"option to add progress bar to gifs, applies\" + \"only when --create-gif",
"progress bar should be added to gif \"\"\" print(\"creating training evolution gif\") #",
"for\" + \" additional go package installation instructions\") args = parser.parse_args() # make",
"i, img in enumerate(tqdm(sorted_list)) if ((i + 1) % skip_rate == 0 or",
"import imageio import argparse import subprocess import numpy as np from tqdm import",
"size]) if isinstance(until, int): sorted_list = sorted_list[:until] sorted_list = [ Image.open(img).resize(new_size, Image.ANTIALIAS) for",
"\" > \" + directLong + \"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error",
"to have on graph \"\"\" direct = re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\",",
"interval=0.1, until=None, progress_bar=False): \"\"\" Function to create gif from images Args: direct (str):",
"import glob import imageio import argparse import subprocess import numpy as np from",
"not exist\") # get sorted image list sorted_list = sorted_alphanumeric(glob.glob(directLong + \"/img/*png\")) #",
"if progress bar should be added to gif \"\"\" print(\"creating training evolution gif\")",
"((i + 1) % skip_rate == 0 or i == 0) ] kargs",
"\"./pickles/\" + direct if not os.path.isdir(directLong): sys.exit(directLong + \" does not exist\") #",
"supplied\") parser.add_argument(\"--skip-rate\", type=int, default=2, help=\"skip interval when using images to construct\" + \"",
"ticks to have on graph \"\"\" direct = re.sub(r\"(\\/)?$\", \"\", direct) direct =",
"os.path.isdir(directLong): sys.exit(directLong + \" does not exist\") # get sorted image list sorted_list",
"+ \"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error occurred with gif progress bar,",
"gif from images Args: direct (str): base directory of logged model shrink_factor (int):",
"logged model shrink_factor (int): factor by which to downsample images skip_rate (int): interval",
"= parser.parse_args() # make plot make_plot(args.log_dir, args.number_ticks) # if necessary, make gif if",
"import subprocess import numpy as np from tqdm import tqdm from PIL import",
"added to gif \"\"\" print(\"creating training evolution gif\") # clean up directory input",
"np from tqdm import tqdm from PIL import Image from pygifsicle import optimize",
"be added to gif \"\"\" print(\"creating training evolution gif\") # clean up directory",
"imageio.mimsave(directLong + \"/vis/vis.gif\", sorted_list, **kargs) optimize(directLong + \"/vis/vis.gif\", directLong + \"/vis/vis.gif\") if progress_bar:",
"\"\"\" direct = re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\", \"\", direct) directLong =",
"creation,\" + \" applies only when --create-gif is supplied\") parser.add_argument( \"--progress-bar\", default=False, action=\"store_true\",",
"glob import imageio import argparse import subprocess import numpy as np from tqdm",
"used in gif construction progress_bar (bool): True if progress bar should be added",
"else text.lower() alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)] return",
"1) % skip_rate == 0 or i == 0) ] kargs = {'duration':",
"sorted_list = sorted_list[:until] sorted_list = [ Image.open(img).resize(new_size, Image.ANTIALIAS) for i, img in enumerate(tqdm(sorted_list))",
"c in re.split('([0-9]+)', key)] return sorted(data, key=alphanum_key) def make_plot(direct, number_ticks): \"\"\" Function to",
"\"\"\" print(\"creating training evolution gif\") # clean up directory input direct = re.sub(r\"(\\/)?$\",",
"only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch limit for",
"required = parser.add_argument_group(\"required name arguments\") required.add_argument(\"--log-dir\", type=str, required=True, help=\"base directory within pickles from",
"text: int(text) if text.isdigit() else text.lower() alphanum_key = lambda key: [convert(c) for c",
"re.sub(r\"(\\/)?$\", \"\", direct) direct = re.sub(r\"(\\.\\/)?pickles\\/\", \"\", direct) directLong = \"./pickles/\" + direct",
"readme for\" + \" additional go package installation instructions\") args = parser.parse_args() #",
"sort Returns: (list): sorted list \"\"\" convert = lambda text: int(text) if text.isdigit()",
"sys.exit(directLong + \" does not exist\") # get sorted image list sorted_list =",
"for el in size]) if isinstance(until, int): sorted_list = sorted_list[:until] sorted_list = [",
"gif\") output = subprocess.call(\"cat \" + directLong + \"/vis/vis.gif\" + \" | gif-progress",
"parser.add_argument(\"--number-ticks\", type=int, default=10, help=\"number of x-axis ticks to use in main plots\") parser.add_argument(\"--create-gif\",",
"= Image.open(sorted_list[0]).size new_size = tuple([int(el / shrink_factor) for el in size]) if isinstance(until,",
"shrink_factor (int): factor by which to downsample images skip_rate (int): interval to images",
"applies only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch limit",
"gif creation,\" + \" applies only when --create-gif is supplied\") parser.add_argument( \"--progress-bar\", default=False,",
"= re.sub(r\"(\\.\\/)?pickles\\/\", \"\", direct) directLong = \"./pickles/\" + direct if not os.path.isdir(directLong): sys.exit(directLong",
"\"/vis/vis.gif\" + \" | gif-progress --bar-color '#000'\" + \" > \" + directLong",
"x-axis ticks to use in main plots\") parser.add_argument(\"--create-gif\", default=False, action=\"store_true\", help=\"option to activate",
"directLong + \"/vis/out.gif\", shell=True) if output != 0: sys.exit(\"error occurred with gif progress",
"list of strings to sort Returns: (list): sorted list \"\"\" convert = lambda",
"from which to\" + \" visualize\") parser.add_argument(\"--number-ticks\", type=int, default=10, help=\"number of x-axis ticks",
"log csv file Args: direct (str): base directory of logged model number_ticks (int):",
"plot values from log csv file Args: direct (str): base directory of logged",
"limit for gif creation,\" + \" applies only when --create-gif is supplied\") parser.add_argument(",
"+ \"/vis/vis.gif\") if progress_bar: print(\"adding progress bar to gif\") output = subprocess.call(\"cat \"",
"upper epoch limit for gif creation,\" + \" applies only when --create-gif is",
"factor by which to downsample images skip_rate (int): interval to images to use",
"number of ticks to have on graph \"\"\" direct = re.sub(r\"(\\/)?$\", \"\", direct)",
"progress_bar (bool): True if progress bar should be added to gif \"\"\" print(\"creating",
"+ \" > \" + directLong + \"/vis/out.gif\", shell=True) if output != 0:",
"exist_ok=True) subprocess.call( [\"Rscript\", \"gg.R\", \"-d\", directLong, \"-t\", str(number_ticks)]) def make_gif(direct, shrink_factor=4, skip_rate=2, interval=0.1,",
"check\") if __name__ == \"__main__\": parser = argparse.ArgumentParser(formatter_class=arg_metav_formatter) required = parser.add_argument_group(\"required name arguments\")",
"+ \" does not exist\") # get sorted image list sorted_list = sorted_alphanumeric(glob.glob(directLong",
"from images Args: direct (str): base directory of logged model shrink_factor (int): factor",
"if output != 0: sys.exit(\"error occurred with gif progress bar, do manual check\")",
"are of same size size = Image.open(sorted_list[0]).size new_size = tuple([int(el / shrink_factor) for",
"gif interval (float): temporal interval for gif construction or speed until (int): upper",
"= [ Image.open(img).resize(new_size, Image.ANTIALIAS) for i, img in enumerate(tqdm(sorted_list)) if ((i + 1)",
"upper limit for epoch to be used in gif construction progress_bar (bool): True",
"parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch limit for gif creation,\" + \" applies",
"key: [convert(c) for c in re.split('([0-9]+)', key)] return sorted(data, key=alphanum_key) def make_plot(direct, number_ticks):",
"el in size]) if isinstance(until, int): sorted_list = sorted_list[:until] sorted_list = [ Image.open(img).resize(new_size,",
"applies only when --create-gif is supplied\") parser.add_argument( \"--progress-bar\", default=False, action=\"store_true\", help=\"option to add",
"file Args: direct (str): base directory of logged model number_ticks (int): number of",
"images, applies only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch",
"supplied\") parser.add_argument(\"--until\", type=int, default=None, help=\"set upper epoch limit for gif creation,\" + \"",
"sorted list \"\"\" convert = lambda text: int(text) if text.isdigit() else text.lower() alphanum_key",
"key)] return sorted(data, key=alphanum_key) def make_plot(direct, number_ticks): \"\"\" Function to plot values from",
"if progress_bar: print(\"adding progress bar to gif\") output = subprocess.call(\"cat \" + directLong",
"type=int, default=None, help=\"set upper epoch limit for gif creation,\" + \" applies only",
"gifs from\" + \" images, applies only when --create-gif is supplied\") parser.add_argument(\"--until\", type=int,",
"(list): sorted list \"\"\" convert = lambda text: int(text) if text.isdigit() else text.lower()",
"imageio import argparse import subprocess import numpy as np from tqdm import tqdm",
"to plot values from log csv file Args: direct (str): base directory of",
"Args: data (list): list of strings to sort Returns: (list): sorted list \"\"\"",
"logged model number_ticks (int): number of ticks to have on graph \"\"\" direct",
"strings to sort Returns: (list): sorted list \"\"\" convert = lambda text: int(text)",
"int): sorted_list = sorted_list[:until] sorted_list = [ Image.open(img).resize(new_size, Image.ANTIALIAS) for i, img in",
"construction progress_bar (bool): True if progress bar should be added to gif \"\"\"",
"help=\"base directory within pickles from which to\" + \" visualize\") parser.add_argument(\"--number-ticks\", type=int, default=10,",
"for epoch to be used in gif construction progress_bar (bool): True if progress",
"from pygifsicle import optimize from obj.arg_formatter import arg_metav_formatter def sorted_alphanumeric(data): \"\"\" Function to",
"\"only when --create-gif is supplied; check readme for\" + \" additional go package",
"progress bar to gifs, applies\" + \"only when --create-gif is supplied; check readme",
"exist\") # make vis directory within log directory os.makedirs(directLong + \"/vis\", exist_ok=True) subprocess.call(",
"0: sys.exit(\"error occurred with gif progress bar, do manual check\") if __name__ ==",
"for gif construction or speed until (int): upper limit for epoch to be",
"number-containing strings Args: data (list): list of strings to sort Returns: (list): sorted",
"action=\"store_true\", help=\"option to activate gif creation\") parser.add_argument(\"--shrink-factor\", type=int, default=4, help=\"shrinking factor for images,",
"Image.open(img).resize(new_size, Image.ANTIALIAS) for i, img in enumerate(tqdm(sorted_list)) if ((i + 1) % skip_rate"
] |
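glob.glob returns paths in arbitrary order, and plain lexicographic sorting would put epoch_10.png before epoch_2.png, scrambling the gif frames; that is what sorted_alphanumeric guards against. A minimal demonstration of the same key construction (the epoch_*.png names are made up):

import re

# same key construction as sorted_alphanumeric above
convert = lambda text: int(text) if text.isdigit() else text.lower()
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]

frames = ["epoch_10.png", "epoch_2.png", "epoch_1.png"]
print(sorted(frames))                    # ['epoch_1.png', 'epoch_10.png', 'epoch_2.png']
print(sorted(frames, key=alphanum_key))  # ['epoch_1.png', 'epoch_2.png', 'epoch_10.png']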
<filename>djangoerp/registration/forms.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import unicode_literals

"""This file is part of the django ERP project.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""

__author__ = '<NAME> <<EMAIL>>'
__copyright__ = 'Copyright (c) 2013-2015, django ERP Team'
__version__ = '0.0.5'

from django import forms
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _

from djangoerp.core.forms.auth import UserForm


class UserRegistrationForm(UserForm):
    """Form for user registration.
    """
    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # Improved security.
        if hasattr(self.fields, 'is_admin'):
            self.fields.pop('is_admin')
        if hasattr(self.fields, 'is_staff'):
            self.fields.pop('is_staff')
        if hasattr(self.fields, 'is_active'):
            self.fields.pop('is_active')
        if hasattr(self.fields, 'is_superuser'):
            self.fields.pop('is_superuser')
        if hasattr(self.fields, 'groups'):
            self.fields.pop('groups')
        if hasattr(self.fields, 'user_permissions'):
            self.fields.pop('user_permissions')
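One caveat in the file above: self.fields on a Django form is a dict of bound fields, so hasattr(self.fields, 'is_admin') tests for an attribute on the dict rather than a key and is always False, which leaves every pop unreachable. A sketch of the same guard using key lookups, i.e. what the "Improved security" comment intends rather than what the original executes:

from djangoerp.core.forms.auth import UserForm


class UserRegistrationForm(UserForm):
    """Form for user registration."""

    def __init__(self, *args, **kwargs):
        super(UserRegistrationForm, self).__init__(*args, **kwargs)
        # Improved security: drop privileged fields if the parent form
        # defined them; dict.pop(name, None) is a no-op when absent.
        for name in ('is_admin', 'is_staff', 'is_active', 'is_superuser',
                     'groups', 'user_permissions'):
            self.fields.pop(name, None)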
"""
Given a list, sort it using this method: reverse(lst, i, j), which reverses
lst from i to j.
"""


def reverse(lst, i, j):
    # reverse lst[i:j] in place
    lst[i: j] = lst[i: j][::-1]


def sort_with_reverse(lst: list):
    # selection sort where each placement is performed as a reversal:
    # find the minimum of the unsorted suffix and flip it to the front
    iterator = 0
    while iterator < len(lst) - 1:
        smallest_value = min(lst[iterator:])
        index_smallest = lst.index(smallest_value, iterator, len(lst))
        reverse(lst, iterator, index_smallest + 1)
        iterator += 1
    return lst


if __name__ == '__main__':
    l = [3, 2, 4, 1]
    assert sort_with_reverse(l) == [1, 2, 3, 4]
    l = [5, 2, 3, 4, 5, 6, 2]
    assert sort_with_reverse(l) == [2, 2, 3, 4, 5, 5, 6]
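The two asserts only cover fixed inputs; since sort_with_reverse should agree with Python's built-in sorted on any list, a randomized cross-check is cheap. A minimal sketch, meant to run alongside the definitions above (the iteration count, sizes, and value bounds are arbitrary choices):

import random

# property check: the reversal sort must agree with sorted() on random lists
for _ in range(1000):
    lst = [random.randrange(100) for _ in range(random.randrange(20))]
    assert sort_with_reverse(list(lst)) == sorted(lst)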
# inferred import: not attested in the recovered fragments, but Flask,
# render_template and request are all used below
from flask import Flask, render_template, request
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
import pymysql
pymysql.install_as_MySQLdb()

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = "mysql://root:@localhost/shoppingwebapp"
db = SQLAlchemy(app)


class Contacts(db.Model):
    ''' sno name email mes date '''
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(25), nullable=False)   # size inferred
    email = db.Column(db.String(20), nullable=False)  # size inferred
    mes = db.Column(db.String(120), nullable=False)
    date = db.Column(db.String(12), nullable=True)


class Checkout(db.Model):
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(25), nullable=False)
    phone = db.Column(db.String(20), nullable=False)
    email = db.Column(db.String(20), nullable=False)
    country = db.Column(db.String(12), nullable=False)
    address = db.Column(db.String(120), nullable=False)
    postcode = db.Column(db.String(20), nullable=False)
    city = db.Column(db.String(20), nullable=False)
    date = db.Column(db.String(12), nullable=True)


class Register(db.Model):
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(25), nullable=False)
    phone = db.Column(db.Integer, nullable=True)
    email = db.Column(db.String(20), nullable=False)
    password = db.Column(db.String(20), nullable=False)


class Payment(db.Model):
    sno = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(25), nullable=False)
    card = db.Column(db.Integer, nullable=True)
    cvv = db.Column(db.Integer, nullable=False)


class Specification(db.Model):
    sno = db.Column(db.Integer, primary_key=True)
    size = db.Column(db.Integer, nullable=False)
    color = db.Column(db.String(25), nullable=True)
    delivery = db.Column(db.String(25), nullable=False)


@app.route('/')
def home():
    return render_template('index.html')


@app.route('/about')
def about():
    return render_template('about.html')


@app.route('/cart')
def cart():
    return render_template('cart.html')


@app.route('/checkout', methods=['GET', 'POST'])
def check():
    if request.method == 'POST':
        name = request.form.get('name')
        phone = request.form.get('phone')
        email = request.form.get('email')
        country = request.form.get('country')
        address = request.form.get('address')
        postcode = request.form.get('postcode')
        city = request.form.get('city')
        entry = Checkout(name=name, phone=phone, email=email,
                         country=country, address=address,
                         postcode=postcode, city=city, date=datetime.now())
        db.session.add(entry)
        db.session.commit()
    return render_template('checkout.html')


@app.route("/register", methods=['GET', 'POST'])
def register():
    if request.method == 'POST':
        name = request.form.get('name')
        email = request.form.get('email')
        password = request.form.get('password')
        phone = request.form.get('phone')
        entry = Register(name=name, email=email,
                         password=password, phone=phone)
        db.session.add(entry)
        db.session.commit()
    return render_template("register.html")


@app.route('/specification', methods=['GET', 'POST'])
def specification():
    if request.method == 'POST':
        size = request.form.get('size')
        color = request.form.get('color')
        delivery = request.form.get('delivery')
        entry = Specification(size=size, color=color, delivery=delivery)
        db.session.add(entry)
        db.session.commit()
    return render_template('specification.html')


@app.route('/payment', methods=['GET', 'POST'])
def payment():
    if request.method == 'POST':
        name = request.form.get('name')
        card = request.form.get('card')
        cvv = request.form.get('cvv')
        entry = Payment(name=name, card=card, cvv=cvv)
        db.session.add(entry)
        db.session.commit()
    return render_template('payment.html')


@app.route('/category', methods=['GET', 'POST'])
def category():
    return render_template('category.html')


@app.route('/product_detail')
def productdetail():
    return render_template('product_detail.html')


@app.route('/search')
def search():
    return render_template('search.html')


@app.route('/contact', methods=['GET', 'POST'])
def contact():
    if request.method == 'POST':
        '''add entry to database'''
        name = request.form.get('name')
        email = request.form.get('email')
        message = request.form.get('msg')
        entry = Contacts(name=name, email=email,
                         mes=message, date=datetime.now())
        db.session.add(entry)
        db.session.commit()
    return render_template('contact.html')


@app.route('/product')
# (the recovered text ends here, mid-definition)
"nullable=False) date = db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name =",
"= db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model):",
"nullable=False) @app.route('/') def home(): return render_template('index.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/cart') def",
"mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer,",
"nullable=False) phone = db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno",
"def productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST'])",
"def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone')",
"'POST']) def contact(): if(request.method=='POST'): '''add entry to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now())",
"= db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25),",
"def about(): return render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST'])",
"pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app) class Contacts(db.Model): '''",
"'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now())",
"class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer,",
"= db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False) country =",
"nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone =",
"import datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db =",
"size = db.Column(db.Integer, nullable=False) color = db.Column(db.String(25), nullable=True) delivery = db.Column(db.String(25), nullable=False) @app.route('/')",
"= db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email =",
"= db.Column(db.String(25), nullable=False) @app.route('/') def home(): return render_template('index.html') @app.route('/about') def about(): return render_template('about.html')",
"= db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date",
"render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html')",
"def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST'])",
"name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html') @app.route('/product') def product(): return",
"= db.Column(db.Integer, nullable=False) color = db.Column(db.String(25), nullable=True) delivery = db.Column(db.String(25), nullable=False) @app.route('/') def",
"if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'):",
"nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False) color =",
"return render_template('index.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout',",
"name email mes date ''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False)",
"date = db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25),",
"\"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app) class Contacts(db.Model): ''' sno name email mes date '''",
"city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True)",
"name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class",
"from flask_sqlalchemy import SQLAlchemy from datetime import datetime import pymysql pymysql.install_as_MySQLdb() app =",
"import SQLAlchemy from datetime import datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI']",
"methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return",
"country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email')",
"render_template, request from flask_sqlalchemy import SQLAlchemy from datetime import datetime import pymysql pymysql.install_as_MySQLdb()",
"nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size =",
"db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date =",
"Flask, render_template, request from flask_sqlalchemy import SQLAlchemy from datetime import datetime import pymysql",
"= db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False)",
"entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv)",
"db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html') @app.route('/product_detail') def productdetail(): return",
"= db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False)",
"def search(): return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST']) def contact(): if(request.method=='POST'): '''add entry",
"nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20),",
"password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card =",
"city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'):",
"methods= ['GET', 'POST']) def contact(): if(request.method=='POST'): '''add entry to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg')",
"color = db.Column(db.String(25), nullable=True) delivery = db.Column(db.String(25), nullable=False) @app.route('/') def home(): return render_template('index.html')",
"database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html') @app.route('/product') def product():",
"country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"])",
"name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html')",
"app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app) class Contacts(db.Model): ''' sno",
"db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False)",
"= db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class",
"= db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer,",
"db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True)",
"name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'):",
"specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment():",
"def category(): return render_template('category.html') @app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return",
"db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone",
"email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html') @app.route('/product') def product(): return render_template('product.html')",
"= db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False)",
"nullable=True) delivery = db.Column(db.String(25), nullable=False) @app.route('/') def home(): return render_template('index.html') @app.route('/about') def about():",
"render_template('category.html') @app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods=",
"db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size",
"primary_key=True) name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False)",
"email = db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20),",
"db.Column(db.String(25), nullable=True) delivery = db.Column(db.String(25), nullable=False) @app.route('/') def home(): return render_template('index.html') @app.route('/about') def",
"db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno",
"if(request.method=='POST'): '''add entry to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return",
"Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True)",
"address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True)",
"render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address')",
"return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html') @app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search')",
"''' sno name email mes date ''' sno = db.Column(db.Integer, primary_key=True) name =",
"render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html') @app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search') def",
"return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST']) def contact():",
"check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit()",
"db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model):",
"db = SQLAlchemy(app) class Contacts(db.Model): ''' sno name email mes date ''' sno",
"class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer,",
"@app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode')",
"\"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\")",
"country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False)",
"db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry)",
"entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery)",
"= SQLAlchemy(app) class Contacts(db.Model): ''' sno name email mes date ''' sno =",
"@app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods= ['GET',",
"from flask import Flask, render_template, request from flask_sqlalchemy import SQLAlchemy from datetime import",
"nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card",
"nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True) class Register(db.Model):",
"phone = db.Column(db.Integer, nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno =",
"import Flask, render_template, request from flask_sqlalchemy import SQLAlchemy from datetime import datetime import",
"= db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date =",
"Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False) color = db.Column(db.String(25), nullable=True)",
"productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST']) def",
"datetime import datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db",
"nullable=False) city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer,",
"request from flask_sqlalchemy import SQLAlchemy from datetime import datetime import pymysql pymysql.install_as_MySQLdb() app",
"nullable=False) card = db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno =",
"db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password')",
"def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def",
"delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv')",
"return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country')",
"['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email,",
"<reponame>swathiprabhu3/SwakshaGadgetShop from flask import Flask, render_template, request from flask_sqlalchemy import SQLAlchemy from datetime",
"home(): return render_template('index.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html')",
"@app.route('/about') def about(): return render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET',",
"if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification():",
"= db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes =",
"render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST']) def contact(): if(request.method=='POST'):",
"return render_template('search.html') @app.route('/contact', methods= ['GET', 'POST']) def contact(): if(request.method=='POST'): '''add entry to database'''",
"message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html') @app.route('/product') def product(): return render_template('product.html') app.run(debug=True)",
"address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def",
"= db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model):",
"@app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit()",
"db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email = db.Column(db.String(20),",
"db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True) class Register(db.Model): sno =",
"'''add entry to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html')",
"return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return",
"cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer,",
"if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return",
"= db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True)",
"db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12),",
"password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color')",
"db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120),",
"phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery')",
"nullable=False) email = db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode=",
"primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False)",
"flask import Flask, render_template, request from flask_sqlalchemy import SQLAlchemy from datetime import datetime",
"db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno",
"return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return",
"return render_template('category.html') @app.route('/product_detail') def productdetail(): return render_template('product_detail.html') @app.route('/search') def search(): return render_template('search.html') @app.route('/contact',",
"cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html') @app.route('/product_detail') def",
"name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False) country",
"nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone =",
"nullable=True) email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name",
"nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name =",
"Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False)",
"db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True) email = db.Column(db.String(20),",
"sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv",
"postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register():",
"entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html') @app.route('/product_detail') def productdetail():",
"to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('contact.html') @app.route('/product') def",
"from datetime import datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\"",
"db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city=",
"payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category():",
"db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False) country = db.Column(db.String(12),",
"= db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False) country = db.Column(db.String(12), nullable=False) address= db.Column(db.String(120),",
"db.Column(db.String(120), nullable=False) postcode= db.Column(db.String(20), nullable=False) city= db.Column(db.String(20), nullable=False) date = db.Column(db.String(12), nullable=True) class",
"db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone')",
"name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date",
"class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20),",
"= db.Column(db.Integer, nullable=True) cvv = db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True)",
"email = db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name =",
"nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno =",
"SQLAlchemy(app) class Contacts(db.Model): ''' sno name email mes date ''' sno = db.Column(db.Integer,",
"size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name')",
"= db.Column(db.String(12), nullable=True) class Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False)",
"mes date ''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email =",
"@app.route('/') def home(): return render_template('index.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/cart') def cart():",
"sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False) mes",
"datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app)",
"sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True) email",
"sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False) color = db.Column(db.String(25), nullable=True) delivery",
"color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit() return render_template('specification.html') @app.route('/payment',methods=['GET','POST']) def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card')",
"= db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False) color = db.Column(db.String(25), nullable=True) delivery =",
"Contacts(db.Model): ''' sno name email mes date ''' sno = db.Column(db.Integer, primary_key=True) name",
"= db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True) email =",
"return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone)",
"= db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) card = db.Column(db.Integer, nullable=True) cvv =",
"email=request.form.get('email') country=request.form.get('country') address=request.form.get('address') postcode=request.form.get('postcode') city=request.form.get('city') entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\",",
"render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name') email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry)",
"name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def category(): return render_template('category.html')",
"email mes date ''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email",
"= \"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app) class Contacts(db.Model): ''' sno name email mes date",
"return render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check():",
"email=request.form.get('email') password=request.form.get('password') phone=request.form.get('phone') entry=Register(name=name,email=email,password=password,phone=phone) db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size')",
"db.Column(db.Integer, nullable=False) class Specification(db.Model): sno = db.Column(db.Integer, primary_key=True) size = db.Column(db.Integer, nullable=False) color",
"def contact(): if(request.method=='POST'): '''add entry to database''' name=request.form.get('name') email=request.form.get('email') message=request.form.get('msg') entry=Contacts(name=name,email=email, mes=message,date=datetime.now()) db.session.add(entry)",
"entry=Checkout(name=name,phone=phone,email=email, country=country,address=address,postcode=postcode,city=city,date=datetime.now()) db.session.add(entry) db.session.commit() return render_template('checkout.html') @app.route(\"/register\", methods=[\"GET\", \"POST\"]) def register(): if(request.method=='POST'): name=request.form.get('name')",
"primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.String(20), nullable=False) email = db.Column(db.String(20), nullable=False)",
"nullable=False) email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True)",
"date = db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25),",
"render_template('about.html') @app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'):",
"SQLAlchemy from datetime import datetime import pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] =",
"email = db.Column(db.String(20), nullable=False) mes = db.Column(db.String(120), nullable=False) date = db.Column(db.String(12), nullable=True) class",
"cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name') phone=request.form.get('phone') email=request.form.get('email')",
"db.Column(db.String(20), nullable=False) password=db.Column(db.String(20),nullable=False) class Payment(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False)",
"Register(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone = db.Column(db.Integer, nullable=True)",
"date ''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20),",
"@app.route('/cart') def cart(): return render_template('cart.html') @app.route('/checkout', methods= ['GET', 'POST']) def check(): if(request.method=='POST'): name=request.form.get('name')",
"pymysql pymysql.install_as_MySQLdb() app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = \"mysql://root:@localhost/shoppingwebapp\" db = SQLAlchemy(app) class Contacts(db.Model):",
"sno name email mes date ''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25),",
"db.session.add(entry) db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry)",
"db.session.commit() return render_template(\"register.html\") @app.route('/specification',methods=['GET','POST']) def specification(): if(request.method=='POST'): size=request.form.get('size') color=request.form.get('color') delivery=request.form.get('delivery') entry=Specification(size=size,color=color,delivery=delivery) db.session.add(entry) db.session.commit()",
"@app.route('/contact', methods= ['GET', 'POST']) def contact(): if(request.method=='POST'): '''add entry to database''' name=request.form.get('name') email=request.form.get('email')",
"def payment(): if(request.method=='POST'): name=request.form.get('name') card=request.form.get('card') cvv=request.form.get('cvv') entry=Payment(name=name,card=card,cvv=cvv) db.session.add(entry) db.session.commit() return render_template('payment.html') @app.route('/category',methods=['GET','POST']) def",
"db.Column(db.String(12), nullable=True) class Checkout(db.Model): sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) phone",
"delivery = db.Column(db.String(25), nullable=False) @app.route('/') def home(): return render_template('index.html') @app.route('/about') def about(): return",
"''' sno = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25), nullable=False) email = db.Column(db.String(20), nullable=False)"
]
[
"max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер',",
"kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель",
"blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug':",
"max_length=250) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta:",
"def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name",
"= models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120)",
"models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner =",
"null=True) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug})",
"техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description",
"категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description =",
"= models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title',",
"def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name = 'Модель'",
"upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description =",
"max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='') header",
"= models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner",
"models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class",
"категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120)",
"title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort",
"def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class",
"blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description',",
"keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug':",
"= 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор",
"max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники',",
"null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title",
"models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description",
"models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords",
"title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def",
"images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title =",
"категории') name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text",
"models from django.urls import reverse # Create your models here. class Category(models.Model): \"\"\"Кактегория",
"models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug =",
"blank=True, null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords',",
"banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self): return",
"\"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название",
"= models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True)",
"max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def",
"модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True,",
"models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active =",
"models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка",
"self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из",
"slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True,",
"description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name def",
"get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name = 'Модель' verbose_name_plural",
"max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title =",
"description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True)",
"услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст",
"verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category =",
"class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\"",
"django.db import models from django.urls import reverse # Create your models here. class",
"return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name = 'Модель' verbose_name_plural =",
"banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description',",
"unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self):",
"models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name",
"return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория'",
"text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title',",
"default='', unique=True) text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True)",
"here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL',",
"из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги',",
"= models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст",
"reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name = 'Модель' verbose_name_plural = 'Модели'",
"Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name =",
"= models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок',",
"self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name =",
"category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug",
"text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header =",
"keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/',",
"null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250)",
"max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/',",
"'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL,",
"import models from django.urls import reverse # Create your models here. class Category(models.Model):",
"name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории')",
"header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images",
"default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name def",
"reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model):",
"from django.db import models from django.urls import reverse # Create your models here.",
"= models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120)",
"null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120)",
"models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category':",
"# Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории',",
"class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120)",
"name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text =",
"active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords",
"verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True,",
"= models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return",
"def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural =",
"null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True,",
"= models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner",
"= 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category,",
"null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='',",
"Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text",
"slug = models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='') header =",
"reverse # Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название",
"django.urls import reverse # Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name",
"return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta: verbose_name",
"self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural",
"= models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort =",
"= models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self):",
"return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class",
"категории', max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер',",
"sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self):",
"sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True)",
"= models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug})",
"= models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self):",
"default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True)",
"models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True)",
"models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner = models.ImageField('Баннер', upload_to='images/', blank=True, null=True) title",
"blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True)",
"models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL',",
"models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок',",
"models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120)",
"models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели',",
"max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0,",
"= models.CharField('Keywords', max_length=250) sort = models.PositiveIntegerField('Порядок', default=0, unique=True) banner = models.ImageField('Баннер', upload_to='images/', blank=True,",
"= models.ImageField('Баннер', upload_to='images/', blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view',",
"get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории'",
"\"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text =",
"= models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок',",
"upload_to='images/', blank=True, null=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords =",
"Meta: verbose_name = 'Категория' verbose_name_plural = 'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category",
"models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240,",
"models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords',",
"= models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active",
"'Категории' class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории')",
"models.CharField('Название категории', max_length=120) slug = models.SlugField('URL', max_length=120) text = models.TextField('Текст категории') banner =",
"__str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug, 'slug': self.slug}) class Meta:",
"max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True,",
"= models.BooleanField('Опубликовать', default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords =",
"class Model(models.Model): \"\"\"Модель из категории\"\"\" category = models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name",
"blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/',",
"__str__(self): return self.name def get_absolute_url(self): return reverse('service_category', kwargs={'slug': self.slug}) class Meta: verbose_name =",
"models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return",
"unique=True) text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header",
"max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_category',",
"your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120) slug",
"models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name def get_absolute_url(self): return",
"max_length=240, blank=True, null=True) images = models.ImageField('Картинка техники', upload_to='images/', blank=True, null=True) active = models.BooleanField('Опубликовать',",
"<gh_stars>0 from django.db import models from django.urls import reverse # Create your models",
"import reverse # Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name =",
"upload_to='images/', blank=True, null=True) def __str__(self): return self.name def get_absolute_url(self): return reverse('service_model_view', kwargs={'category': self.category.slug,",
"= models.CharField('Заголовок', max_length=240, blank=True, null=True) sub_header = models.CharField('Подзаголовок', max_length=240, blank=True, null=True) images =",
"Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\" name = models.CharField('Название категории', max_length=120)",
"= models.ForeignKey(Category, on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug =",
"max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250) def __str__(self): return self.name",
"from django.urls import reverse # Create your models here. class Category(models.Model): \"\"\"Кактегория услуг\"\"\"",
"on_delete=models.SET_NULL, null=True, verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120,",
"verbose_name='Выбор категории') name = models.CharField('Название услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True)",
"default=True) title = models.CharField('Title', max_length=120) description = models.CharField('Description', max_length=120) keywords = models.CharField('Keywords', max_length=250)",
"models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='') header = models.CharField('Заголовок', max_length=240,",
"услуги', max_length=120) slug = models.SlugField('URL', max_length=120, default='', unique=True) text = models.TextField('Текст модели', default='')"
] |
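Both `get_absolute_url` methods assume URL patterns named `service_category` and `service_model_view` that accept the matching slug keyword arguments. A minimal `urls.py` sketch that would satisfy those `reverse()` calls follows; the views module and class names are hypothetical, and only the route names and kwargs are taken from the models above.

# Hypothetical urls.py sketch: only the route names and slug kwargs are
# implied by the reverse() calls above; the view classes are assumptions.
from django.urls import path

from . import views  # assumed module providing CategoryView and ModelView

urlpatterns = [
    path('<slug:slug>/', views.CategoryView.as_view(), name='service_category'),
    path('<slug:category>/<slug:slug>/', views.ModelView.as_view(), name='service_model_view'),
]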
# Source: GYosifov88/Python-Fundamentals (winning_ticket.py)

def additional_func(partition):
    # Scan one half of the ticket, tracking the length of the current run of
    # identical characters; stop early once a run of at least 6 has been seen.
    current_max_num = 0
    special_char = ''
    for ch in partition:
        if ch != special_char:
            if current_max_num >= 6:
                break
            current_max_num = 1
            special_char = ch
        else:
            current_max_num += 1
    return [current_max_num, special_char]


def ticket_validator(ticket):
    ticket_condition = ''
    if len(ticket) != 20:
        ticket_condition = "invalid ticket"
    elif ticket[0] * 20 == ticket and ticket[0] in '@#$^':
        # All 20 characters are the same winning symbol.
        ticket_condition = f'ticket "{ticket}" - 10{ticket[0]} Jackpot!'
    else:
        # Take the lexicographically smaller [run length, symbol] pair of the
        # two halves, i.e. the half with the shorter winning run.
        if additional_func(ticket[0:10]) > additional_func(ticket[10:]):
            data_source = additional_func(ticket[10:])
        else:
            data_source = additional_func(ticket[0:10])
        number_of_special_signs = data_source[0]
        special_sign = data_source[1]
        if number_of_special_signs < 6 or special_sign not in '@#$^':
            ticket_condition = f'ticket "{ticket}" - no match'
        else:
            ticket_condition = f'ticket "{ticket}" - {number_of_special_signs}{special_sign}'
    return ticket_condition


def winning_ticket(data):
    for ticket in data:
        print(ticket_validator(ticket))


tickets_info = input()
data = [x.strip() for x in tickets_info.split(',')]
winning_ticket(data)
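A quick illustration of the validator on the three ticket shapes it distinguishes. This is a sketch only: the sample tickets are made up, and the expected lines follow from the format strings above.

# Illustrative only: exercises ticket_validator without the input() driver.
samples = [
    '@@@@@@@@@@@@@@@@@@@@',  # 20 identical winning symbols -> Jackpot
    'Cash$$$$$$Ca$$$$$$TN',  # runs of six '$' in both halves -> 6$
    'aabb',                  # wrong length -> invalid
]
for t in samples:
    print(ticket_validator(t))
# ticket "@@@@@@@@@@@@@@@@@@@@" - 10@ Jackpot!
# ticket "Cash$$$$$$Ca$$$$$$TN" - 6$
# invalid ticket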
"""empty message

Revision ID: bbedc353f90c
Revises: d67eab226ecd
Create Date: 2021-10-26 12:05:38.840492

"""
import sqlalchemy_utils
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'bbedc353f90c'
down_revision = 'd67eab226ecd'
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint('client_referral_id_fkey', 'client', type_='foreignkey')
    op.create_foreign_key(None, 'client', 'referral', ['referral_id'], ['id'], ondelete='SET NULL')
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_constraint(None, 'client', type_='foreignkey')
    op.create_foreign_key('client_referral_id_fkey', 'client', 'referral', ['referral_id'], ['id'])
    # ### end Alembic commands ###
from functools import wraps

import lab as B
import numpy as np
from lab.util import resolve_axis

from . import _dispatch

__all__ = [
    "is_framework_module",
    "modules",
    "register_module",
    "models",
    "register_model",
    "composite_coders",
    "register_composite_coder",
    "is_composite_coder",
    "wrapped_partial",
    "is_nonempty",
    "batch",
    "compress_batch_dimensions",
    "split",
    "split_dimension",
    "merge_dimensions",
    "select",
    "with_first_last",
]


@_dispatch
def is_framework_module(x):
    """Check if something is a framework module.

    Args:
        x (object): Object to check.

    Returns:
        bool: `True` if `x` is a framework module, else `False`.
    """
    return False


modules = []  #: Registered modules


def register_module(module):
    """Decorator to register a new module."""
    modules.append(module)
    return module


models = []  #: Registered models


def register_model(model):
    """Decorator to register a new model."""
    models.append(model)
    return model


composite_coders = []  #: Composite coders


def register_composite_coder(coder):
    """Decorator to register a composite coder."""
    composite_coders.append(coder)
    return coder


def is_composite_coder(coder):
    """Check if a coder is composite.

    Args:
        coder (coder): Coder.

    Returns:
        bool: Coder is composite.
    """
    return any([isinstance(coder, c) for c in composite_coders])


def wrapped_partial(f, *partial_args, **partial_kw_args):
    """Like :func:`functools.partial`, but preserves the docstring.

    Args:
        f (function): Function to wrap.
        *partial_args: Partial arguments.
        **partial_kw_args: Partial keyword arguments.

    Returns:
        function: Version of `f` with some arguments and keyword arguments
            already set.
    """

    @wraps(f)
    def wrapped_f(*args, **kw_args):
        return f(*partial_args, *args, **partial_kw_args, **kw_args)

    return wrapped_f


def is_nonempty(x):
    """Check if a tensor is not empty.

    Args:
        x (tensor): Tensor.

    Returns:
        bool: `True` if `x` is not empty, otherwise `False`.
    """
    return all([i > 0 for i in B.shape(x)])


def batch(x, other_dims):
    """Get the shape of the batch of a tensor.

    Args:
        x (tensor): Tensor.
        other_dims (int): Number of non-batch dimensions.

    Returns:
        tuple[int]: Shape of batch dimensions.
    """
    return B.shape(x)[:-other_dims]


def compress_batch_dimensions(x, other_dims):
    """Compress multiple batch dimensions of a tensor into a single batch dimension.

    Args:
        x (tensor): Tensor to compress.
        other_dims (int): Number of non-batch dimensions.

    Returns:
        tensor: `x` with batch dimensions compressed.
        function: Function to undo the compression of the batch dimensions.
    """
    b = batch(x, other_dims)
    if len(b) == 1:
        return x, lambda x: x
    else:

        def uncompress(x_after):
            return B.reshape(x_after, *b, *B.shape(x_after)[1:])

        return B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b):]), uncompress


def split(z, sizes, axis):
    """Split a tensor into multiple tensors.

    Args:
        z (tensor): Tensor to split.
        sizes (iterable[int]): Sizes of the components.
        axis (int): Axis.

    Returns:
        list[tensor]: Components of the split.
    """
    axis = resolve_axis(z, axis)
    index = [slice(None, None, None)] * B.rank(z)
    components = []
    i = 0
    for size in sizes:
        index[axis] = slice(i, i + size, None)
        components.append(z[tuple(index)])
        i += size
    return components


def split_dimension(z, axis, sizes):
    """Split a dimension of a tensor into multiple dimensions.

    Args:
        z (tensor): Tensor to split.
        axis (int): Axis to split.
        sizes (iterable[int]): Sizes of new dimensions.

    Returns:
        tensor: Reshaped version of `z`.
    """
    shape = B.shape(z)
    # The indexing below will only be correct for positive `axis`, so resolve the
    # index.
    axis = resolve_axis(z, axis)
    return B.reshape(z, *shape[:axis], *sizes, *shape[axis + 1:])


def merge_dimensions(z, axis, sizes):
    """Merge dimensions of a tensor into one dimension. This is the opposite of
    :func:`split_dimension`.

    Args:
        z (tensor): Tensor to merge.
        axis (int): Axis to merge into.
        sizes (iterable[int]): Sizes of dimensions to merge.

    Returns:
        tensor: Reshaped version of `z`.
    """
    shape = B.shape(z)
    # The indexing below will only be correct for positive `axis`, so resolve the
    # index.
    axis = resolve_axis(z, axis)
    return B.reshape(
        z,
        *shape[: axis - len(sizes) + 1],
        np.prod(sizes),
        *shape[axis + 1:],
    )


def select(z, i, axis):
    """Select a particular index `i` at axis `axis` without squeezing the tensor.

    Args:
        z (tensor): Tensor to select from.
        i (int): Index to select.
        axis (int): Axis to select from.

    Returns:
        tensor: Selection from `z`.
    """
    axis = resolve_axis(z, axis)
    index = [slice(None, None, None) for _ in range(B.rank(z))]
    index[axis] = slice(i, i + 1, None)
    return z[index]


def with_first_last(xs):
    """Return a generator which indicates whether the returned element is the first
    or last.

    Args:
        xs: Generator to wrap.

    Yields:
        bool: Element is first.
        bool: Element is last.
        object: Element.
    """
    state = {"first": True}

    def first():
        if state["first"]:
            state["first"] = False
            return True
        else:
            return False

    prev = None
    have_prev = False
    cur = None
    have_cur = False
    for x in xs:
        cur = x
        have_cur = True
        if not have_prev:
            # We will need a `prev`, but there is no `prev` yet. Take the current
            # one as `prev` and skip to the next iteration.
            prev = cur
            have_prev = True
            continue
        # We currently have available `prev` and `cur`. We will return `prev` and,
        # after the loop has finished, return `cur` as the last one.
        yield first(), False, prev
        prev = cur
    if have_cur:
        yield first(), True, cur
"dimensions of a tensor into a single batch dimension. Args: x (tensor): Tensor",
"have_cur = True if not have_prev: # We will need a `prev`, but",
"*partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the docstring. Args: f (function): Function to",
"`f` with some arguments and keyword arguments already set. \"\"\" @wraps(f) def wrapped_f(*args,",
"batch of a tensor. Args: x (tensor): Tensor. other_dims (int): Number of non-batch",
"keyword arguments already set. \"\"\" @wraps(f) def wrapped_f(*args, **kw_args): return f(*partial_args, *args, **partial_kw_args,",
"Args: f (function): Function to wrap. *partial_args: Partial arguments. **partial_kw_args: Partial keyword arguments.",
"tensor: `x` with batch dimensions compressed. function: Function to undo the compression of",
"with some arguments and keyword arguments already set. \"\"\" @wraps(f) def wrapped_f(*args, **kw_args):",
"\"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\",",
"*shape[axis + 1 :], ) def select(z, i, axis): \"\"\"Select a particular index",
"c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the docstring.",
"_ in range(B.rank(z))] index[axis] = slice(i, i + 1, None) return z[index] def",
"Tensor. other_dims (int): Number of non-batch dimensions. Returns: tuple[int]: Shape of batch dimensions.",
"(tensor): Tensor to split. sizes (iterable[int]): Sizes of the components. axis (int): Axis.",
"preserves the docstring. Args: f (function): Function to wrap. *partial_args: Partial arguments. **partial_kw_args:",
"sizes): \"\"\"Split a dimension of a tensor into multiple dimensions. Args: z (tensor):",
"axis = resolve_axis(z, axis) return B.reshape(z, *shape[:axis], *sizes, *shape[axis + 1 :]) def",
"= batch(x, other_dims) if len(b) == 1: return x, lambda x: x else:",
"= B.shape(z) # The indexing below will only be correct for positive `axis`,",
"\"\"\"Check if a coder is composite. Args: coder (coder): Coder. Returns: bool: Coder",
"+ size, None) components.append(z[tuple(index)]) i += size return components def split_dimension(z, axis, sizes):",
"Args: z (tensor): Tensor to split. sizes (iterable[int]): Sizes of the components. axis",
"return `prev` and, # after the loop has finished, return `cur` as the",
"if something is a framework module. Args: x (object): Object to check. Returns:",
"\"\"\"Check if a tensor is not empty. Args: x (tensor): Tensor. Returns: bool:",
"\"\"\"Check if something is a framework module. Args: x (object): Object to check.",
"> 0 for i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape of",
"\"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\",",
"last. Args: xs: Generator to wrap. Yields: bool: Element is first. bool: Element",
"have_prev: # We will need a `prev`, but there is no `prev` yet.",
"as # `prev` and skip to the next iteration. prev = cur have_prev",
"coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if a coder is composite. Args:",
"(int): Number of non-batch dimensions. Returns: tuple[int]: Shape of batch dimensions. \"\"\" return",
"import _dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\",",
"operation is the opposite of :func:`split_dimension`. Args: z (tensor): Tensor to merge. axis",
"B.reshape(z, *shape[:axis], *sizes, *shape[axis + 1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions",
"composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the docstring. Args: f",
"wrap. *partial_args: Partial arguments. **partial_kw_args: Partial keyword arguments. Returns: function: Version of `f`",
"(tensor): Tensor. Returns: bool: `True` if `x` is not empty, otherwise `False`. \"\"\"",
"not empty, otherwise `False`. \"\"\" return all([i > 0 for i in B.shape(x)])",
"Axis. Returns: list[tensor]: Components of the split. \"\"\" axis = resolve_axis(z, axis) index",
"# We will need a `prev`, but there is no `prev` yet. Take",
"module models = [] #: Registered models def register_model(model): \"\"\"Decorator to register a",
"merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of a tensor into one dimension. This operation",
"else `False`. \"\"\" return False modules = [] #: Registered modules def register_module(module):",
"0 for i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape of the",
"other_dims): \"\"\"Get the shape of the batch of a tensor. Args: x (tensor):",
"True} def first(): if state[\"first\"]: state[\"first\"] = False return True else: return False",
"- len(sizes) + 1], np.prod(sizes), *shape[axis + 1 :], ) def select(z, i,",
"= [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\",",
"axis (int): Axis to merge into. sizes (iterable[int]): Sizes of dimensions to merge.",
"a tensor into a single batch dimension. Args: x (tensor): Tensor to compress.",
"return coder def is_composite_coder(coder): \"\"\"Check if a coder is composite. Args: coder (coder):",
"to register a new module.\"\"\" modules.append(module) return module models = [] #: Registered",
"all([i > 0 for i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape",
"into multiple dimensions. Args: z (tensor): Tensor to split. axis (int): Axis to",
"object: Element. \"\"\" state = {\"first\": True} def first(): if state[\"first\"]: state[\"first\"] =",
"* B.rank(z) components = [] i = 0 for size in sizes: index[axis]",
"= [slice(None, None, None) for _ in range(B.rank(z))] index[axis] = slice(i, i +",
"*shape[:axis], *sizes, *shape[axis + 1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of",
"tensor. Args: x (tensor): Tensor. other_dims (int): Number of non-batch dimensions. Returns: tuple[int]:",
"] @_dispatch def is_framework_module(x): \"\"\"Check if something is a framework module. Args: x",
"new module.\"\"\" modules.append(module) return module models = [] #: Registered models def register_model(model):",
"`prev` and, # after the loop has finished, return `cur` as the last",
"shape = B.shape(z) # The indexing below will only be correct for positive",
"1: return x, lambda x: x else: def uncompress(x_after): return B.reshape(x_after, *b, *B.shape(x_after)[1:])",
"the current one as # `prev` and skip to the next iteration. prev",
"of `f` with some arguments and keyword arguments already set. \"\"\" @wraps(f) def",
"Returns: bool: `True` if `x` is a framework module, else `False`. \"\"\" return",
"any([isinstance(coder, c) for c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but",
"Returns: tensor: Reshaped version of `z`. \"\"\" shape = B.shape(z) # The indexing",
"dimension. This operation is the opposite of :func:`split_dimension`. Args: z (tensor): Tensor to",
"whether the returned element is the first or last. Args: xs: Generator to",
"+= size return components def split_dimension(z, axis, sizes): \"\"\"Split a dimension of a",
"`prev` and `cur`. We will return `prev` and, # after the loop has",
"Args: x (tensor): Tensor. other_dims (int): Number of non-batch dimensions. Returns: tuple[int]: Shape",
"if a coder is composite. Args: coder (coder): Coder. Returns: bool: Coder is",
"**partial_kw_args: Partial keyword arguments. Returns: function: Version of `f` with some arguments and",
"is the first or last. Args: xs: Generator to wrap. Yields: bool: Element",
"\"\"\" axis = resolve_axis(z, axis) index = [slice(None, None, None) for _ in",
"positive `axis`, so resolve the index. axis = resolve_axis(z, axis) return B.reshape(z, *shape[:axis],",
"None, None)] * B.rank(z) components = [] i = 0 for size in",
"for _ in range(B.rank(z))] index[axis] = slice(i, i + 1, None) return z[index]",
"to merge into. sizes (iterable[int]): Sizes of dimensions to merge. Returns: tensor: Reshaped",
"to select from. Returns: tensor: Selection from `z`. \"\"\" axis = resolve_axis(z, axis)",
"has finished, return `cur` as the last one. yield first(), False, prev prev",
"the batch dimensions. \"\"\" b = batch(x, other_dims) if len(b) == 1: return",
"tensor into multiple dimensions. Args: z (tensor): Tensor to split. axis (int): Axis",
"Returns: bool: `True` if `x` is not empty, otherwise `False`. \"\"\" return all([i",
"False cur = None have_cur = False for x in xs: cur =",
"def with_first_last(xs): \"\"\"Return a generator which indicates whether the returned element is the",
"module, else `False`. \"\"\" return False modules = [] #: Registered modules def",
"tensor into one dimension. This operation is the opposite of :func:`split_dimension`. Args: z",
"particular index `i` at axis `axis` without squeezing the tensor. Args: z (tensor):",
"axis = resolve_axis(z, axis) index = [slice(None, None, None) for _ in range(B.rank(z))]",
"if `x` is a framework module, else `False`. \"\"\" return False modules =",
"is not empty, otherwise `False`. \"\"\" return all([i > 0 for i in",
"from functools import wraps import lab as B import numpy as np from",
"of `z`. \"\"\" shape = B.shape(z) # The indexing below will only be",
"is_nonempty(x): \"\"\"Check if a tensor is not empty. Args: x (tensor): Tensor. Returns:",
"#: Registered modules def register_module(module): \"\"\"Decorator to register a new module.\"\"\" modules.append(module) return",
"dimension of a tensor into multiple dimensions. Args: z (tensor): Tensor to split.",
"(tensor): Tensor to compress. other_dims (int): Number of non-batch dimensions. Returns: tensor: `x`",
"[] #: Composite coders def register_composite_coder(coder): \"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder)",
"sizes, axis): \"\"\"Split a tensor into multiple tensors. Args: z (tensor): Tensor to",
"def split(z, sizes, axis): \"\"\"Split a tensor into multiple tensors. Args: z (tensor):",
"the loop has finished, return `cur` as the last one. yield first(), False,",
"register_model(model): \"\"\"Decorator to register a new model.\"\"\" models.append(model) return model composite_coders = []",
"version of `z`. \"\"\" shape = B.shape(z) # The indexing below will only",
"dimensions to merge. Returns: tensor: Reshaped version of `z`. \"\"\" shape = B.shape(z)",
"\"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x):",
"register a new model.\"\"\" models.append(model) return model composite_coders = [] #: Composite coders",
"(iterable[int]): Sizes of the components. axis (int): Axis. Returns: list[tensor]: Components of the",
"index[axis] = slice(i, i + 1, None) return z[index] def with_first_last(xs): \"\"\"Return a",
"Partial keyword arguments. Returns: function: Version of `f` with some arguments and keyword",
"(int): Number of non-batch dimensions. Returns: tensor: `x` with batch dimensions compressed. function:",
"Index to select. axis (int): Axis to select from. Returns: tensor: Selection from",
"return False prev = None have_prev = False cur = None have_cur =",
"\"\"\"Decorator to register a new module.\"\"\" modules.append(module) return module models = [] #:",
"to register a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if a",
"We will return `prev` and, # after the loop has finished, return `cur`",
"\"\"\"Decorator to register a new model.\"\"\" models.append(model) return model composite_coders = [] #:",
"[] i = 0 for size in sizes: index[axis] = slice(i, i +",
"+ 1], np.prod(sizes), *shape[axis + 1 :], ) def select(z, i, axis): \"\"\"Select",
"\"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check if something is",
"i + size, None) components.append(z[tuple(index)]) i += size return components def split_dimension(z, axis,",
"`x` with batch dimensions compressed. function: Function to undo the compression of the",
"sizes): \"\"\"Merge dimensions of a tensor into one dimension. This operation is the",
"# The indexing below will only be correct for positive `axis`, so resolve",
"opposite of :func:`split_dimension`. Args: z (tensor): Tensor to merge. axis (int): Axis to",
"dimensions. Returns: tensor: `x` with batch dimensions compressed. function: Function to undo the",
"i, axis): \"\"\"Select a particular index `i` at axis `axis` without squeezing the",
"`False`. \"\"\" return all([i > 0 for i in B.shape(x)]) def batch(x, other_dims):",
"wrapped_f(*args, **kw_args): return f(*partial_args, *args, **partial_kw_args, **kw_args) return wrapped_f def is_nonempty(x): \"\"\"Check if",
"`False`. \"\"\" return False modules = [] #: Registered modules def register_module(module): \"\"\"Decorator",
"functools import wraps import lab as B import numpy as np from lab.util",
"arguments. Returns: function: Version of `f` with some arguments and keyword arguments already",
"to merge. axis (int): Axis to merge into. sizes (iterable[int]): Sizes of dimensions",
"the tensor. Args: z (tensor): Tensor to select from. i (int): Index to",
"modules = [] #: Registered modules def register_module(module): \"\"\"Decorator to register a new",
"a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if a coder is",
"dimensions. Returns: tuple[int]: Shape of batch dimensions. \"\"\" return B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims):",
"to register a new model.\"\"\" models.append(model) return model composite_coders = [] #: Composite",
"select. axis (int): Axis to select from. Returns: tensor: Selection from `z`. \"\"\"",
"the returned element is the first or last. Args: xs: Generator to wrap.",
"squeezing the tensor. Args: z (tensor): Tensor to select from. i (int): Index",
"have_prev = True continue # We currently have available `prev` and `cur`. We",
"`prev` and skip to the next iteration. prev = cur have_prev = True",
"\"\"\" shape = B.shape(z) # The indexing below will only be correct for",
"the next iteration. prev = cur have_prev = True continue # We currently",
"= [slice(None, None, None)] * B.rank(z) components = [] i = 0 for",
"which indicates whether the returned element is the first or last. Args: xs:",
"other_dims (int): Number of non-batch dimensions. Returns: tensor: `x` with batch dimensions compressed.",
"continue # We currently have available `prev` and `cur`. We will return `prev`",
"\"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check",
"empty, otherwise `False`. \"\"\" return all([i > 0 for i in B.shape(x)]) def",
"to split sizes (iterable[int]): Sizes of new dimensions. Returns: tensor: Reshaped version of",
"Args: xs: Generator to wrap. Yields: bool: Element is first. bool: Element is",
"first. bool: Element is last. object: Element. \"\"\" state = {\"first\": True} def",
"register a new module.\"\"\" modules.append(module) return module models = [] #: Registered models",
"the index. axis = resolve_axis(z, axis) return B.reshape(z, *shape[:axis], *sizes, *shape[axis + 1",
"def register_module(module): \"\"\"Decorator to register a new module.\"\"\" modules.append(module) return module models =",
"modules.append(module) return module models = [] #: Registered models def register_model(model): \"\"\"Decorator to",
"of the batch dimensions. \"\"\" b = batch(x, other_dims) if len(b) == 1:",
"`axis`, so resolve the index. axis = resolve_axis(z, axis) return B.reshape( z, *shape[:",
"Args: x (tensor): Tensor to compress. other_dims (int): Number of non-batch dimensions. Returns:",
"the compression of the batch dimensions. \"\"\" b = batch(x, other_dims) if len(b)",
"function: Function to undo the compression of the batch dimensions. \"\"\" b =",
"axis (int): Axis to select from. Returns: tensor: Selection from `z`. \"\"\" axis",
"`x` is a framework module, else `False`. \"\"\" return False modules = []",
"into. sizes (iterable[int]): Sizes of dimensions to merge. Returns: tensor: Reshaped version of",
"is_framework_module(x): \"\"\"Check if something is a framework module. Args: x (object): Object to",
"prev = cur have_prev = True continue # We currently have available `prev`",
"z[index] def with_first_last(xs): \"\"\"Return a generator which indicates whether the returned element is",
"of new dimensions. Returns: tensor: Reshaped version of `z`. \"\"\" shape = B.shape(z)",
"bool: Element is last. object: Element. \"\"\" state = {\"first\": True} def first():",
"components def split_dimension(z, axis, sizes): \"\"\"Split a dimension of a tensor into multiple",
"This operation is the opposite of :func:`split_dimension`. Args: z (tensor): Tensor to merge.",
"the opposite of :func:`split_dimension`. Args: z (tensor): Tensor to merge. axis (int): Axis",
"x (tensor): Tensor. other_dims (int): Number of non-batch dimensions. Returns: tuple[int]: Shape of",
"dimensions of a tensor into one dimension. This operation is the opposite of",
"Sizes of new dimensions. Returns: tensor: Reshaped version of `z`. \"\"\" shape =",
"without squeezing the tensor. Args: z (tensor): Tensor to select from. i (int):",
"def register_model(model): \"\"\"Decorator to register a new model.\"\"\" models.append(model) return model composite_coders =",
"one dimension. This operation is the opposite of :func:`split_dimension`. Args: z (tensor): Tensor",
"current one as # `prev` and skip to the next iteration. prev =",
"numpy as np from lab.util import resolve_axis from . import _dispatch __all__ =",
"batch dimension. Args: x (tensor): Tensor to compress. other_dims (int): Number of non-batch",
"import wraps import lab as B import numpy as np from lab.util import",
"yield first(), False, prev prev = cur if have_cur: yield first(), True, cur",
"`prev` yet. Take the current one as # `prev` and skip to the",
"slice(i, i + size, None) components.append(z[tuple(index)]) i += size return components def split_dimension(z,",
"(int): Axis to split sizes (iterable[int]): Sizes of new dimensions. Returns: tensor: Reshaped",
"slice(i, i + 1, None) return z[index] def with_first_last(xs): \"\"\"Return a generator which",
"a framework module. Args: x (object): Object to check. Returns: bool: `True` if",
"\"\"\" b = batch(x, other_dims) if len(b) == 1: return x, lambda x:",
"docstring. Args: f (function): Function to wrap. *partial_args: Partial arguments. **partial_kw_args: Partial keyword",
"positive `axis`, so resolve the index. axis = resolve_axis(z, axis) return B.reshape( z,",
"len(sizes) + 1], np.prod(sizes), *shape[axis + 1 :], ) def select(z, i, axis):",
"return all([i > 0 for i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the",
"indexing below will only be correct for positive `axis`, so resolve the index.",
"cur have_prev = True continue # We currently have available `prev` and `cur`.",
"= resolve_axis(z, axis) index = [slice(None, None, None)] * B.rank(z) components = []",
"size in sizes: index[axis] = slice(i, i + size, None) components.append(z[tuple(index)]) i +=",
"of dimensions to merge. Returns: tensor: Reshaped version of `z`. \"\"\" shape =",
"return `cur` as the last one. yield first(), False, prev prev = cur",
"*shape[: axis - len(sizes) + 1], np.prod(sizes), *shape[axis + 1 :], ) def",
"correct for positive `axis`, so resolve the index. axis = resolve_axis(z, axis) return",
") def select(z, i, axis): \"\"\"Select a particular index `i` at axis `axis`",
"in xs: cur = x have_cur = True if not have_prev: # We",
"as the last one. yield first(), False, prev prev = cur if have_cur:",
"last one. yield first(), False, prev prev = cur if have_cur: yield first(),",
"of non-batch dimensions. Returns: tensor: `x` with batch dimensions compressed. function: Function to",
"a tensor into one dimension. This operation is the opposite of :func:`split_dimension`. Args:",
"for x in xs: cur = x have_cur = True if not have_prev:",
"split sizes (iterable[int]): Sizes of new dimensions. Returns: tensor: Reshaped version of `z`.",
"prev = None have_prev = False cur = None have_cur = False for",
"but there is no `prev` yet. Take the current one as # `prev`",
"a coder is composite. Args: coder (coder): Coder. Returns: bool: Coder is composite.",
"wrap. Yields: bool: Element is first. bool: Element is last. object: Element. \"\"\"",
"Sizes of the components. axis (int): Axis. Returns: list[tensor]: Components of the split.",
"of the components. axis (int): Axis. Returns: list[tensor]: Components of the split. \"\"\"",
"B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims): \"\"\"Compress multiple batch dimensions of a tensor into a",
"a single batch dimension. Args: x (tensor): Tensor to compress. other_dims (int): Number",
"= {\"first\": True} def first(): if state[\"first\"]: state[\"first\"] = False return True else:",
"lambda x: x else: def uncompress(x_after): return B.reshape(x_after, *b, *B.shape(x_after)[1:]) return B.reshape(x, int(np.prod(b)),",
"for i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape of the batch",
"of batch dimensions. \"\"\" return B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims): \"\"\"Compress multiple batch dimensions",
"1 :], ) def select(z, i, axis): \"\"\"Select a particular index `i` at",
"returned element is the first or last. Args: xs: Generator to wrap. Yields:",
"Partial arguments. **partial_kw_args: Partial keyword arguments. Returns: function: Version of `f` with some",
"def compress_batch_dimensions(x, other_dims): \"\"\"Compress multiple batch dimensions of a tensor into a single",
"a `prev`, but there is no `prev` yet. Take the current one as",
"_dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\",",
"(tensor): Tensor to merge. axis (int): Axis to merge into. sizes (iterable[int]): Sizes",
"there is no `prev` yet. Take the current one as # `prev` and",
"in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape of the batch of a",
"non-batch dimensions. Returns: tensor: `x` with batch dimensions compressed. function: Function to undo",
"`cur`. We will return `prev` and, # after the loop has finished, return",
"`True` if `x` is not empty, otherwise `False`. \"\"\" return all([i > 0",
"to select. axis (int): Axis to select from. Returns: tensor: Selection from `z`.",
"#: Registered models def register_model(model): \"\"\"Decorator to register a new model.\"\"\" models.append(model) return",
"to undo the compression of the batch dimensions. \"\"\" b = batch(x, other_dims)",
"in sizes: index[axis] = slice(i, i + size, None) components.append(z[tuple(index)]) i += size",
"state[\"first\"] = False return True else: return False prev = None have_prev =",
"axis, sizes): \"\"\"Split a dimension of a tensor into multiple dimensions. Args: z",
"Tensor to merge. axis (int): Axis to merge into. sizes (iterable[int]): Sizes of",
"register_composite_coder(coder): \"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check",
"element is the first or last. Args: xs: Generator to wrap. Yields: bool:",
"\"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check if something",
"tuple[int]: Shape of batch dimensions. \"\"\" return B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims): \"\"\"Compress multiple",
"(coder): Coder. Returns: bool: Coder is composite. \"\"\" return any([isinstance(coder, c) for c",
"axis) index = [slice(None, None, None) for _ in range(B.rank(z))] index[axis] = slice(i,",
"but preserves the docstring. Args: f (function): Function to wrap. *partial_args: Partial arguments.",
". import _dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\",",
"first(): if state[\"first\"]: state[\"first\"] = False return True else: return False prev =",
"== 1: return x, lambda x: x else: def uncompress(x_after): return B.reshape(x_after, *b,",
"Sizes of dimensions to merge. Returns: tensor: Reshaped version of `z`. \"\"\" shape",
"coder def is_composite_coder(coder): \"\"\"Check if a coder is composite. Args: coder (coder): Coder.",
"a tensor into multiple dimensions. Args: z (tensor): Tensor to split. axis (int):",
"to split. axis (int): Axis to split sizes (iterable[int]): Sizes of new dimensions.",
"shape of the batch of a tensor. Args: x (tensor): Tensor. other_dims (int):",
"= slice(i, i + 1, None) return z[index] def with_first_last(xs): \"\"\"Return a generator",
"is first. bool: Element is last. object: Element. \"\"\" state = {\"first\": True}",
"tensor is not empty. Args: x (tensor): Tensor. Returns: bool: `True` if `x`",
"= True if not have_prev: # We will need a `prev`, but there",
"is composite. Args: coder (coder): Coder. Returns: bool: Coder is composite. \"\"\" return",
"to split. sizes (iterable[int]): Sizes of the components. axis (int): Axis. Returns: list[tensor]:",
"True else: return False prev = None have_prev = False cur = None",
"*B.shape(x)[len(b) :]), uncompress def split(z, sizes, axis): \"\"\"Split a tensor into multiple tensors.",
"Returns: list[tensor]: Components of the split. \"\"\" axis = resolve_axis(z, axis) index =",
"Tensor to split. axis (int): Axis to split sizes (iterable[int]): Sizes of new",
"if len(b) == 1: return x, lambda x: x else: def uncompress(x_after): return",
"list[tensor]: Components of the split. \"\"\" axis = resolve_axis(z, axis) index = [slice(None,",
"\"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def",
"Args: x (object): Object to check. Returns: bool: `True` if `x` is a",
"composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if a coder is composite. Args: coder",
"return f(*partial_args, *args, **partial_kw_args, **kw_args) return wrapped_f def is_nonempty(x): \"\"\"Check if a tensor",
"\"\"\" axis = resolve_axis(z, axis) index = [slice(None, None, None)] * B.rank(z) components",
"**partial_kw_args, **kw_args) return wrapped_f def is_nonempty(x): \"\"\"Check if a tensor is not empty.",
"import resolve_axis from . import _dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\",",
"if not have_prev: # We will need a `prev`, but there is no",
"as np from lab.util import resolve_axis from . import _dispatch __all__ = [",
"already set. \"\"\" @wraps(f) def wrapped_f(*args, **kw_args): return f(*partial_args, *args, **partial_kw_args, **kw_args) return",
"resolve_axis(z, axis) return B.reshape( z, *shape[: axis - len(sizes) + 1], np.prod(sizes), *shape[axis",
"lab.util import resolve_axis from . import _dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\",",
"Axis to select from. Returns: tensor: Selection from `z`. \"\"\" axis = resolve_axis(z,",
"sizes: index[axis] = slice(i, i + size, None) components.append(z[tuple(index)]) i += size return",
"\"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ]",
"return module models = [] #: Registered models def register_model(model): \"\"\"Decorator to register",
"resolve the index. axis = resolve_axis(z, axis) return B.reshape( z, *shape[: axis -",
"[ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\",",
"dimension. Args: x (tensor): Tensor to compress. other_dims (int): Number of non-batch dimensions.",
"x else: def uncompress(x_after): return B.reshape(x_after, *b, *B.shape(x_after)[1:]) return B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b) :]),",
"split_dimension(z, axis, sizes): \"\"\"Split a dimension of a tensor into multiple dimensions. Args:",
"the index. axis = resolve_axis(z, axis) return B.reshape( z, *shape[: axis - len(sizes)",
"will need a `prev`, but there is no `prev` yet. Take the current",
"def register_composite_coder(coder): \"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder):",
"no `prev` yet. Take the current one as # `prev` and skip to",
"**partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the docstring. Args: f (function): Function to wrap.",
"axis - len(sizes) + 1], np.prod(sizes), *shape[axis + 1 :], ) def select(z,",
"= None have_prev = False cur = None have_cur = False for x",
"need a `prev`, but there is no `prev` yet. Take the current one",
"False for x in xs: cur = x have_cur = True if not",
"and keyword arguments already set. \"\"\" @wraps(f) def wrapped_f(*args, **kw_args): return f(*partial_args, *args,",
"b = batch(x, other_dims) if len(b) == 1: return x, lambda x: x",
"\"\"\" return any([isinstance(coder, c) for c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like",
"otherwise `False`. \"\"\" return all([i > 0 for i in B.shape(x)]) def batch(x,",
"bool: `True` if `x` is not empty, otherwise `False`. \"\"\" return all([i >",
"[] #: Registered modules def register_module(module): \"\"\"Decorator to register a new module.\"\"\" modules.append(module)",
"Tensor to select from. i (int): Index to select. axis (int): Axis to",
"dimensions. \"\"\" return B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims): \"\"\"Compress multiple batch dimensions of a",
"= [] #: Registered modules def register_module(module): \"\"\"Decorator to register a new module.\"\"\"",
"the batch of a tensor. Args: x (tensor): Tensor. other_dims (int): Number of",
"coder is composite. Args: coder (coder): Coder. Returns: bool: Coder is composite. \"\"\"",
"# We currently have available `prev` and `cur`. We will return `prev` and,",
"= [] i = 0 for size in sizes: index[axis] = slice(i, i",
"next iteration. prev = cur have_prev = True continue # We currently have",
"Registered modules def register_module(module): \"\"\"Decorator to register a new module.\"\"\" modules.append(module) return module",
"batch(x, other_dims) if len(b) == 1: return x, lambda x: x else: def",
"loop has finished, return `cur` as the last one. yield first(), False, prev",
"(tensor): Tensor. other_dims (int): Number of non-batch dimensions. Returns: tuple[int]: Shape of batch",
"\"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check if",
"tensor into multiple tensors. Args: z (tensor): Tensor to split. sizes (iterable[int]): Sizes",
"Args: z (tensor): Tensor to merge. axis (int): Axis to merge into. sizes",
"register a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if a coder",
"a dimension of a tensor into multiple dimensions. Args: z (tensor): Tensor to",
"\"\"\"Merge dimensions of a tensor into one dimension. This operation is the opposite",
"+ 1 :], ) def select(z, i, axis): \"\"\"Select a particular index `i`",
"(int): Index to select. axis (int): Axis to select from. Returns: tensor: Selection",
"Selection from `z`. \"\"\" axis = resolve_axis(z, axis) index = [slice(None, None, None)",
"(tensor): Tensor to split. axis (int): Axis to split sizes (iterable[int]): Sizes of",
"models.append(model) return model composite_coders = [] #: Composite coders def register_composite_coder(coder): \"\"\"Decorator to",
"None)] * B.rank(z) components = [] i = 0 for size in sizes:",
"of a tensor into one dimension. This operation is the opposite of :func:`split_dimension`.",
"models def register_model(model): \"\"\"Decorator to register a new model.\"\"\" models.append(model) return model composite_coders",
"= True continue # We currently have available `prev` and `cur`. We will",
"and `cur`. We will return `prev` and, # after the loop has finished,",
"tensor into a single batch dimension. Args: x (tensor): Tensor to compress. other_dims",
"is last. object: Element. \"\"\" state = {\"first\": True} def first(): if state[\"first\"]:",
"0 for size in sizes: index[axis] = slice(i, i + size, None) components.append(z[tuple(index)])",
"Returns: tuple[int]: Shape of batch dimensions. \"\"\" return B.shape(x)[:-other_dims] def compress_batch_dimensions(x, other_dims): \"\"\"Compress",
"\"split_dimension\", \"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check if something is a",
"yet. Take the current one as # `prev` and skip to the next",
"__all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\",",
"x (tensor): Tensor to compress. other_dims (int): Number of non-batch dimensions. Returns: tensor:",
":]), uncompress def split(z, sizes, axis): \"\"\"Split a tensor into multiple tensors. Args:",
"size, None) components.append(z[tuple(index)]) i += size return components def split_dimension(z, axis, sizes): \"\"\"Split",
"not have_prev: # We will need a `prev`, but there is no `prev`",
"into one dimension. This operation is the opposite of :func:`split_dimension`. Args: z (tensor):",
"a generator which indicates whether the returned element is the first or last.",
"1, None) return z[index] def with_first_last(xs): \"\"\"Return a generator which indicates whether the",
"tensor: Reshaped version of `z`. \"\"\" shape = B.shape(z) # The indexing below",
"(tensor): Tensor to select from. i (int): Index to select. axis (int): Axis",
"Returns: bool: Coder is composite. \"\"\" return any([isinstance(coder, c) for c in composite_coders])",
"to the next iteration. prev = cur have_prev = True continue # We",
"select from. Returns: tensor: Selection from `z`. \"\"\" axis = resolve_axis(z, axis) index",
"return model composite_coders = [] #: Composite coders def register_composite_coder(coder): \"\"\"Decorator to register",
"or last. Args: xs: Generator to wrap. Yields: bool: Element is first. bool:",
"Coder is composite. \"\"\" return any([isinstance(coder, c) for c in composite_coders]) def wrapped_partial(f,",
"xs: Generator to wrap. Yields: bool: Element is first. bool: Element is last.",
"if state[\"first\"]: state[\"first\"] = False return True else: return False prev = None",
"Returns: tensor: `x` with batch dimensions compressed. function: Function to undo the compression",
"will return `prev` and, # after the loop has finished, return `cur` as",
"\"\"\"Get the shape of the batch of a tensor. Args: x (tensor): Tensor.",
"+ 1, None) return z[index] def with_first_last(xs): \"\"\"Return a generator which indicates whether",
"not empty. Args: x (tensor): Tensor. Returns: bool: `True` if `x` is not",
"batch dimensions. \"\"\" b = batch(x, other_dims) if len(b) == 1: return x,",
"of :func:`split_dimension`. Args: z (tensor): Tensor to merge. axis (int): Axis to merge",
"index = [slice(None, None, None)] * B.rank(z) components = [] i = 0",
"wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the docstring. Args: f (function): Function",
"for positive `axis`, so resolve the index. axis = resolve_axis(z, axis) return B.reshape(z,",
"[] #: Registered models def register_model(model): \"\"\"Decorator to register a new model.\"\"\" models.append(model)",
"i in B.shape(x)]) def batch(x, other_dims): \"\"\"Get the shape of the batch of",
"with batch dimensions compressed. function: Function to undo the compression of the batch",
"Components of the split. \"\"\" axis = resolve_axis(z, axis) index = [slice(None, None,",
"1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of a tensor into one",
"[slice(None, None, None) for _ in range(B.rank(z))] index[axis] = slice(i, i + 1,",
"a new module.\"\"\" modules.append(module) return module models = [] #: Registered models def",
"split(z, sizes, axis): \"\"\"Split a tensor into multiple tensors. Args: z (tensor): Tensor",
"= False for x in xs: cur = x have_cur = True if",
"axis) index = [slice(None, None, None)] * B.rank(z) components = [] i =",
"to check. Returns: bool: `True` if `x` is a framework module, else `False`.",
"so resolve the index. axis = resolve_axis(z, axis) return B.reshape(z, *shape[:axis], *sizes, *shape[axis",
"module.\"\"\" modules.append(module) return module models = [] #: Registered models def register_model(model): \"\"\"Decorator",
"is no `prev` yet. Take the current one as # `prev` and skip",
"axis = resolve_axis(z, axis) return B.reshape( z, *shape[: axis - len(sizes) + 1],",
"c) for c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves",
"\"merge_dimensions\", \"select\", \"with_first_last\", ] @_dispatch def is_framework_module(x): \"\"\"Check if something is a framework",
"resolve_axis(z, axis) index = [slice(None, None, None) for _ in range(B.rank(z))] index[axis] =",
"one as # `prev` and skip to the next iteration. prev = cur",
"after the loop has finished, return `cur` as the last one. yield first(),",
"is a framework module, else `False`. \"\"\" return False modules = [] #:",
"from. i (int): Index to select. axis (int): Axis to select from. Returns:",
"framework module, else `False`. \"\"\" return False modules = [] #: Registered modules",
"<reponame>tom-andersson/neuralprocesses from functools import wraps import lab as B import numpy as np",
"def batch(x, other_dims): \"\"\"Get the shape of the batch of a tensor. Args:",
"False return True else: return False prev = None have_prev = False cur",
"return any([isinstance(coder, c) for c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`,",
"the components. axis (int): Axis. Returns: list[tensor]: Components of the split. \"\"\" axis",
"\"\"\"Like :func:`functools.partial`, but preserves the docstring. Args: f (function): Function to wrap. *partial_args:",
"\"\"\"Compress multiple batch dimensions of a tensor into a single batch dimension. Args:",
"[slice(None, None, None)] * B.rank(z) components = [] i = 0 for size",
"compression of the batch dimensions. \"\"\" b = batch(x, other_dims) if len(b) ==",
"= False cur = None have_cur = False for x in xs: cur",
"We currently have available `prev` and `cur`. We will return `prev` and, #",
"from . import _dispatch __all__ = [ \"is_framework_module\", \"modules\", \"register_module\", \"models\", \"register_model\", \"composite_coders\",",
"lab as B import numpy as np from lab.util import resolve_axis from .",
"False modules = [] #: Registered modules def register_module(module): \"\"\"Decorator to register a",
"single batch dimension. Args: x (tensor): Tensor to compress. other_dims (int): Number of",
"z (tensor): Tensor to merge. axis (int): Axis to merge into. sizes (iterable[int]):",
"None) components.append(z[tuple(index)]) i += size return components def split_dimension(z, axis, sizes): \"\"\"Split a",
"Returns: tensor: Selection from `z`. \"\"\" axis = resolve_axis(z, axis) index = [slice(None,",
"Returns: function: Version of `f` with some arguments and keyword arguments already set.",
"\"\"\"Select a particular index `i` at axis `axis` without squeezing the tensor. Args:",
"B.shape(z) # The indexing below will only be correct for positive `axis`, so",
"return B.reshape(z, *shape[:axis], *sizes, *shape[axis + 1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge",
"`cur` as the last one. yield first(), False, prev prev = cur if",
"Number of non-batch dimensions. Returns: tensor: `x` with batch dimensions compressed. function: Function",
"return B.reshape( z, *shape[: axis - len(sizes) + 1], np.prod(sizes), *shape[axis + 1",
"undo the compression of the batch dimensions. \"\"\" b = batch(x, other_dims) if",
"i + 1, None) return z[index] def with_first_last(xs): \"\"\"Return a generator which indicates",
"sizes (iterable[int]): Sizes of the components. axis (int): Axis. Returns: list[tensor]: Components of",
"into multiple tensors. Args: z (tensor): Tensor to split. sizes (iterable[int]): Sizes of",
"is not empty. Args: x (tensor): Tensor. Returns: bool: `True` if `x` is",
"\"\"\" state = {\"first\": True} def first(): if state[\"first\"]: state[\"first\"] = False return",
"multiple dimensions. Args: z (tensor): Tensor to split. axis (int): Axis to split",
"**kw_args) return wrapped_f def is_nonempty(x): \"\"\"Check if a tensor is not empty. Args:",
"True if not have_prev: # We will need a `prev`, but there is",
"axis (int): Axis to split sizes (iterable[int]): Sizes of new dimensions. Returns: tensor:",
"of a tensor. Args: x (tensor): Tensor. other_dims (int): Number of non-batch dimensions.",
"uncompress def split(z, sizes, axis): \"\"\"Split a tensor into multiple tensors. Args: z",
"= resolve_axis(z, axis) return B.reshape( z, *shape[: axis - len(sizes) + 1], np.prod(sizes),",
"Number of non-batch dimensions. Returns: tuple[int]: Shape of batch dimensions. \"\"\" return B.shape(x)[:-other_dims]",
"the first or last. Args: xs: Generator to wrap. Yields: bool: Element is",
"#: Composite coders def register_composite_coder(coder): \"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder) return",
"(iterable[int]): Sizes of new dimensions. Returns: tensor: Reshaped version of `z`. \"\"\" shape",
"only be correct for positive `axis`, so resolve the index. axis = resolve_axis(z,",
"framework module. Args: x (object): Object to check. Returns: bool: `True` if `x`",
"x (object): Object to check. Returns: bool: `True` if `x` is a framework",
"(object): Object to check. Returns: bool: `True` if `x` is a framework module,",
"if a tensor is not empty. Args: x (tensor): Tensor. Returns: bool: `True`",
"np from lab.util import resolve_axis from . import _dispatch __all__ = [ \"is_framework_module\",",
"None) for _ in range(B.rank(z))] index[axis] = slice(i, i + 1, None) return",
"Version of `f` with some arguments and keyword arguments already set. \"\"\" @wraps(f)",
"dimensions compressed. function: Function to undo the compression of the batch dimensions. \"\"\"",
"else: def uncompress(x_after): return B.reshape(x_after, *b, *B.shape(x_after)[1:]) return B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b) :]), uncompress",
"Element. \"\"\" state = {\"first\": True} def first(): if state[\"first\"]: state[\"first\"] = False",
"Tensor to split. sizes (iterable[int]): Sizes of the components. axis (int): Axis. Returns:",
"for size in sizes: index[axis] = slice(i, i + size, None) components.append(z[tuple(index)]) i",
"None) return z[index] def with_first_last(xs): \"\"\"Return a generator which indicates whether the returned",
"return components def split_dimension(z, axis, sizes): \"\"\"Split a dimension of a tensor into",
"*partial_args: Partial arguments. **partial_kw_args: Partial keyword arguments. Returns: function: Version of `f` with",
"arguments and keyword arguments already set. \"\"\" @wraps(f) def wrapped_f(*args, **kw_args): return f(*partial_args,",
"to merge. Returns: tensor: Reshaped version of `z`. \"\"\" shape = B.shape(z) #",
"axis, sizes): \"\"\"Merge dimensions of a tensor into one dimension. This operation is",
"as B import numpy as np from lab.util import resolve_axis from . import",
"for positive `axis`, so resolve the index. axis = resolve_axis(z, axis) return B.reshape(",
"axis) return B.reshape( z, *shape[: axis - len(sizes) + 1], np.prod(sizes), *shape[axis +",
"split. \"\"\" axis = resolve_axis(z, axis) index = [slice(None, None, None)] * B.rank(z)",
"None have_cur = False for x in xs: cur = x have_cur =",
"\"register_module\", \"models\", \"register_model\", \"composite_coders\", \"register_composite_coder\", \"is_composite_coder\", \"wrapped_partial\", \"is_nonempty\", \"batch\", \"compress_batch_dimensions\", \"split\", \"split_dimension\", \"merge_dimensions\",",
"= False return True else: return False prev = None have_prev = False",
"1], np.prod(sizes), *shape[axis + 1 :], ) def select(z, i, axis): \"\"\"Select a",
"\"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder) return coder def is_composite_coder(coder): \"\"\"Check if",
"(int): Axis to select from. Returns: tensor: Selection from `z`. \"\"\" axis =",
"indicates whether the returned element is the first or last. Args: xs: Generator",
"wraps import lab as B import numpy as np from lab.util import resolve_axis",
"generator which indicates whether the returned element is the first or last. Args:",
"uncompress(x_after): return B.reshape(x_after, *b, *B.shape(x_after)[1:]) return B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b) :]), uncompress def split(z,",
"def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of a tensor into one dimension. This",
"*sizes, *shape[axis + 1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of a",
"a tensor. Args: x (tensor): Tensor. other_dims (int): Number of non-batch dimensions. Returns:",
"cur = x have_cur = True if not have_prev: # We will need",
"Composite coders def register_composite_coder(coder): \"\"\"Decorator to register a composite coder.\"\"\" composite_coders.append(coder) return coder",
"to select from. i (int): Index to select. axis (int): Axis to select",
"axis `axis` without squeezing the tensor. Args: z (tensor): Tensor to select from.",
"skip to the next iteration. prev = cur have_prev = True continue #",
"bool: Element is first. bool: Element is last. object: Element. \"\"\" state =",
"axis): \"\"\"Select a particular index `i` at axis `axis` without squeezing the tensor.",
":func:`functools.partial`, but preserves the docstring. Args: f (function): Function to wrap. *partial_args: Partial",
"and, # after the loop has finished, return `cur` as the last one.",
"from. Returns: tensor: Selection from `z`. \"\"\" axis = resolve_axis(z, axis) index =",
"for c in composite_coders]) def wrapped_partial(f, *partial_args, **partial_kw_args): \"\"\"Like :func:`functools.partial`, but preserves the",
"= x have_cur = True if not have_prev: # We will need a",
"to compress. other_dims (int): Number of non-batch dimensions. Returns: tensor: `x` with batch",
"Generator to wrap. Yields: bool: Element is first. bool: Element is last. object:",
"batch dimensions compressed. function: Function to undo the compression of the batch dimensions.",
"merge into. sizes (iterable[int]): Sizes of dimensions to merge. Returns: tensor: Reshaped version",
"B.reshape( z, *shape[: axis - len(sizes) + 1], np.prod(sizes), *shape[axis + 1 :],",
"B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b) :]), uncompress def split(z, sizes, axis): \"\"\"Split a tensor into",
"B.rank(z) components = [] i = 0 for size in sizes: index[axis] =",
"from `z`. \"\"\" axis = resolve_axis(z, axis) index = [slice(None, None, None) for",
"# `prev` and skip to the next iteration. prev = cur have_prev =",
"= 0 for size in sizes: index[axis] = slice(i, i + size, None)",
"@wraps(f) def wrapped_f(*args, **kw_args): return f(*partial_args, *args, **partial_kw_args, **kw_args) return wrapped_f def is_nonempty(x):",
"return wrapped_f def is_nonempty(x): \"\"\"Check if a tensor is not empty. Args: x",
"check. Returns: bool: `True` if `x` is a framework module, else `False`. \"\"\"",
"available `prev` and `cur`. We will return `prev` and, # after the loop",
"xs: cur = x have_cur = True if not have_prev: # We will",
"+ 1 :]) def merge_dimensions(z, axis, sizes): \"\"\"Merge dimensions of a tensor into",
"dimensions. \"\"\" b = batch(x, other_dims) if len(b) == 1: return x, lambda",
"z (tensor): Tensor to split. sizes (iterable[int]): Sizes of the components. axis (int):",
"(int): Axis. Returns: list[tensor]: Components of the split. \"\"\" axis = resolve_axis(z, axis)",
"(int): Axis to merge into. sizes (iterable[int]): Sizes of dimensions to merge. Returns:",
"Take the current one as # `prev` and skip to the next iteration.",
"return B.reshape(x_after, *b, *B.shape(x_after)[1:]) return B.reshape(x, int(np.prod(b)), *B.shape(x)[len(b) :]), uncompress def split(z, sizes,",
"compressed. function: Function to undo the compression of the batch dimensions. \"\"\" b"
] |
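
For orientation, here is a minimal usage sketch of the two less obvious helpers above, `compress_batch_dimensions` and `with_first_last`. It is not part of the source: the import path `neuralprocesses.util` is a guess at where this module lives, and it assumes that `lab` dispatches on plain NumPy arrays.

# Usage sketch: collapse two batch dimensions into one, undo the collapse, and
# iterate with first/last flags. `neuralprocesses.util` is an assumed path.
import numpy as np

from neuralprocesses.util import compress_batch_dimensions, with_first_last

x = np.ones((2, 3, 4, 5))  # batch shape (2, 3); two non-batch dimensions
y, uncompress = compress_batch_dimensions(x, other_dims=2)
assert y.shape == (6, 4, 5)                 # batch dimensions collapsed
assert uncompress(y).shape == (2, 3, 4, 5)  # and restored again

for is_first, is_last, element in with_first_last("abc"):
    print(is_first, is_last, element)
# Prints: True False a / False False b / False True c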

# coding: utf-8
import numpy as np

from word2vec.utils import unitvec


class WordVectors(object):

    def __init__(self, vocab=None, vectors=None, saveMemory=True):
        self.vocab = vocab
        if not saveMemory:
            self.vectors = vectors
        # Assumed: the fragments only preserve "self.vectors ="; this line is
        # inferred from the docstrings' "precomputed l2norm of the vectors".
        self.l2norm = np.vstack(unitvec(vec) for vec in vectors)

    def ix(self, word):
        '''
        Returns the index on self.vocab and self.l2norm for `word`
        '''
        temp = np.where(self.vocab == word)[0]
        if temp.size == 0:
            raise KeyError('Word not in vocabulary')
        else:
            return temp[0]

    def get_vector(self, word):
        '''
        Returns the (l2norm) vector for `word` in the vocabulary
        '''
        idx = self.ix(word)
        return self.l2norm[idx]

    def __getitem__(self, word):
        return self.get_vector(word)

    def generate_response(self, indexes, metric, exclude=''):
        '''
        Generates a response as a list of tuples based on the indexes
        Each tuple is: (vocab[i], metric[i])
        '''
        if isinstance(exclude, basestring):
            exclude = [exclude]
        # Pairing assumed from "(vocab[i], metric[i])" in the docstring above.
        return [(word, sim) for word, sim in
                zip(self.vocab[indexes], metric[indexes]) if word not in exclude]

    def cosine(self, words, n=10):
        '''
        Cosine similarity.

        metric = dot(l2norm_of_vectors, l2norm_of_target_vector)
        Uses a precomputed l2norm of the vectors

        Parameters
        ----------
        words : string or list of string
            word(s) in the vocabulary to calculate the vectors
        n : int, optional (default 10)
            number of neighbors to return

        Returns
        -------
        dict: with the n similar words and their similarity as a list of tuples

        Example
        -------
        >>> ...
        {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)]}
        '''
        if isinstance(words, basestring):
            words = [words]

        targets = np.vstack((self.get_vector(word) for word in words))
        metrics = np.dot(self.l2norm, targets.T)

        ans = {}
        for col, word in enumerate(words):
            best = np.argsort(metrics[:, col])[::-1][:n + 1]
            best = self.generate_response(best, metrics[:, col], exclude=word)
            ans[word] = best

        return ans

    def _cosine(self, word, n=10):
        '''
        Cosine distance using scipy.distance.cosine

        Note: This method is **a lot** slower than `self.cosine`
        and results are almost the same, really just use `self.cosine`

        This is just available for testing.
        Requires: `__init__(..., saveMemory=False)`

        Parameters
        ----------
        word : string
            word in the vocabulary to calculate the vectors
        n : int, optional (default 10)
            number of neighbors to return
        '''
        from scipy.spatial import distance

        target_vec = self[word]
        metric = np.empty(self.vocab.shape)
        for idx, vector in enumerate(self.vectors):
            metric[idx] = distance.cosine(target_vec, vector)

        best = metric.argsort()[:n + 1]
        return self.generate_response(best, metric, exclude=word)

    def analogy(self, pos, neg, n=10):
        '''
        Analogy similarity.

        Parameters
        ----------
        pos : list
        neg : list

        Returns
        -------
        List of tuples

        Example
        -------
        `king - man + woman = queen` will be:
        `pos=['king', 'woman'], neg=['man']`
        '''
        words = pos + neg

        pos = [(word, 1.0) for word in pos]
        neg = [(word, -1.0) for word in neg]

        mean = []
        for word, direction in pos + neg:
            mean.append(direction * unitvec(self.get_vector(word)))
        mean = np.array(mean).mean(axis=0)

        similarities = np.dot(self.l2norm, mean)
        best = similarities.argsort()[::-1][:n + len(words) - 1]
        return self.generate_response(best, similarities, exclude=words)
"if isinstance(exclude, basestring): exclude = [exclude] return [(word, sim) for word, sim in",
"indexes, metric, exclude=''): ''' Generates a response as a list of tuples based",
"word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab = vocab",
"not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def",
"if temp.size == 0: raise KeyError('Word not in vocabulary') else: return temp[0] def",
"pos = [(word, 1.0) for word in pos] neg = [(word, -1.0) for",
"return ans def _cosine(self, word, n=10): ''' Cosine distance using scipy.distance.cosine Note: This",
"in neg] mean = [] for word, direction in pos + neg: mean.append(direction",
"exclude=word) def analogy(self, pos, neg, n=10): ''' Analogy similarity. Parameters ---------- pos :",
"word, direction in pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities",
"optional (default 10) number of neighbors to return ''' from scipy.spatial import distance",
"def ix(self, word): ''' Returns the index on self.vocab and self.l2norm for `word`",
"1.0) for word in pos] neg = [(word, -1.0) for word in neg]",
": string or list of string word(s) in the vocabulary to calculate the",
"word, sim in zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def cosine(self, words,",
"Note: This method is **a lot** slower than `self.cosine` and results are the",
"word : string word in the vocabulary to calculate the vectors n :",
"slower than `self.cosine` and results are the almost the same, really just use",
"`__init__(..., saveMemory=False)` Parameters ---------- word : string word in the vocabulary to calculate",
"= self[word] metric = np.empty(self.vocab.shape) for idx, vector in enumerate(self.vectors): metric[idx] = distance.cosine(target_vec,",
"from word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab =",
"self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def ix(self, word):",
"+ 1] return self.generate_response(best, metric, exclude=word) def analogy(self, pos, neg, n=10): ''' Analogy",
"return Returns ------- dict: with the n similar words and its similarity as",
"n similar words and its similarity as a list of tuples Example -------",
"(vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude = [exclude] return [(word, sim) for",
"precomputed l2norm of the vectors Parameters ---------- words : string or list of",
"= self.ix(word) return self.l2norm[idx] def __getitem__(self, word): return self.get_vector(word) def generate_response(self, indexes, metric,",
"# coding: utf-8 import numpy as np from word2vec.utils import unitvec class WordVectors(object):",
"= pos + neg pos = [(word, 1.0) for word in pos] neg",
"saveMemory=False)` Parameters ---------- word : string word in the vocabulary to calculate the",
"vocab if not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec in",
"neg, n=10): ''' Analogy similarity. Parameters ---------- pos : list neg : list",
"_cosine(self, word, n=10): ''' Cosine distance using scipy.distance.cosine Note: This method is **a",
"= distance.cosine(target_vec, vector) best = metric.argsort()[:n + 1] return self.generate_response(best, metric, exclude=word) def",
"= np.vstack(unitvec(vec) for vec in vectors) def ix(self, word): ''' Returns the index",
"the (l2norm) vector for `word` in the vocabulary ''' idx = self.ix(word) return",
"Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the vectors",
"word): ''' Returns the (l2norm) vector for `word` in the vocabulary ''' idx",
"col, word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best,",
"= [] for word, direction in pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean",
"exclude=''): ''' Generates a response as a list of tuples based on the",
"target_vec = self[word] metric = np.empty(self.vocab.shape) for idx, vector in enumerate(self.vectors): metric[idx] =",
"for word, sim in zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def cosine(self,",
"list of tuples Example ------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516),",
"each tuple is (word, similarity) Example ------- `king - man + woman =",
"''' if isinstance(exclude, basestring): exclude = [exclude] return [(word, sim) for word, sim",
"metric[idx] = distance.cosine(target_vec, vector) best = metric.argsort()[:n + 1] return self.generate_response(best, metric, exclude=word)",
"''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the",
"''' Generates a response as a list of tuples based on the indexes",
"of tuples, each tuple is (word, similarity) Example ------- `king - man +",
"import numpy as np from word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None,",
"similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the vectors Parameters",
"results are the almost the same, really just use `self.cosine` This is just",
"word in the vocabulary to calculate the vectors n : int, optional (default",
"''' Returns the index on self.vocab and self.l2norm for `word` ''' temp =",
"Returns ------- List of tuples, each tuple is (word, similarity) Example ------- `king",
"number of neighbors to return Returns ------- dict: with the n similar words",
"best = self.generate_response(best, metrics[:, col], exclude=word) ans[word] = best return ans def _cosine(self,",
"isinstance(exclude, basestring): exclude = [exclude] return [(word, sim) for word, sim in zip(self.vocab[indexes],",
"10) number of neighbors to return Returns ------- dict: with the n similar",
"n=10): ''' Cosine distance using scipy.distance.cosine Note: This method is **a lot** slower",
"''' Analogy similarity. Parameters ---------- pos : list neg : list Returns -------",
"distance target_vec = self[word] metric = np.empty(self.vocab.shape) for idx, vector in enumerate(self.vectors): metric[idx]",
"`self.cosine` This is just available for testing. Requires: `__init__(..., saveMemory=False)` Parameters ---------- word",
"= np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm, targets.T) ans = {}",
": list Returns ------- List of tuples, each tuple is (word, similarity) Example",
"`king - man + woman = queen` will be: `pos=['king', 'woman'], neg=['man']` '''",
"the vocabulary ''' idx = self.ix(word) return self.l2norm[idx] def __getitem__(self, word): return self.get_vector(word)",
"response as a list of tuples based on the indexes Each tuple is:",
"Parameters ---------- word : string word in the vocabulary to calculate the vectors",
"'woman'], neg=['man']` ''' words = pos + neg pos = [(word, 1.0) for",
"List of tuples, each tuple is (word, similarity) Example ------- `king - man",
"---------- words : string or list of string word(s) in the vocabulary to",
"for vec in vectors) def ix(self, word): ''' Returns the index on self.vocab",
"`word` ''' temp = np.where(self.vocab == word)[0] if temp.size == 0: raise KeyError('Word",
"the vectors Parameters ---------- words : string or list of string word(s) in",
"This is just available for testing. Requires: `__init__(..., saveMemory=False)` Parameters ---------- word :",
"a list of tuples based on the indexes Each tuple is: (vocab[i], metric[i])",
"a precomputed l2norm of the vectors Parameters ---------- words : string or list",
"saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def ix(self,",
"= np.where(self.vocab == word)[0] if temp.size == 0: raise KeyError('Word not in vocabulary')",
"== 0: raise KeyError('Word not in vocabulary') else: return temp[0] def get_vector(self, word):",
"def __getitem__(self, word): return self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates a",
"''' words = pos + neg pos = [(word, 1.0) for word in",
"= [(word, -1.0) for word in neg] mean = [] for word, direction",
"[exclude] return [(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if word not",
"the same, really just use `self.cosine` This is just available for testing. Requires:",
"metric, exclude=word) def analogy(self, pos, neg, n=10): ''' Analogy similarity. Parameters ---------- pos",
"basestring): words = [words] targets = np.vstack((self.get_vector(word) for word in words)) metrics =",
"+ woman = queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words = pos",
"optional (default 10) number of neighbors to return Returns ------- dict: with the",
"+ 1] best = self.generate_response(best, metrics[:, col], exclude=word) ans[word] = best return ans",
": list neg : list Returns ------- List of tuples, each tuple is",
"------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } '''",
"`pos=['king', 'woman'], neg=['man']` ''' words = pos + neg pos = [(word, 1.0)",
"[(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if word not in exclude]",
"index on self.vocab and self.l2norm for `word` ''' temp = np.where(self.vocab == word)[0]",
"unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab = vocab if not",
"temp = np.where(self.vocab == word)[0] if temp.size == 0: raise KeyError('Word not in",
"zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def cosine(self, words, n=10): ''' Cosine",
"raise KeyError('Word not in vocabulary') else: return temp[0] def get_vector(self, word): ''' Returns",
"vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def ix(self, word): ''' Returns",
"return self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates a response as a",
"* unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n +",
"in vectors) def ix(self, word): ''' Returns the index on self.vocab and self.l2norm",
"for col, word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best =",
"np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words) - 1] return",
"= dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the vectors Parameters ---------- words",
"idx, vector in enumerate(self.vectors): metric[idx] = distance.cosine(target_vec, vector) best = metric.argsort()[:n + 1]",
"= {} for col, word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1]",
"exclude=word) ans[word] = best return ans def _cosine(self, word, n=10): ''' Cosine distance",
"word(s) in the vocabulary to calculate the vectors n : int, optional (default",
"testing. Requires: `__init__(..., saveMemory=False)` Parameters ---------- word : string word in the vocabulary",
"neg pos = [(word, 1.0) for word in pos] neg = [(word, -1.0)",
"as np from word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True):",
"= np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words) - 1]",
"word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:,",
"the indexes Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude =",
"words and its similarity as a list of tuples Example ------- >>> model.cosine('black',",
"of neighbors to return ''' from scipy.spatial import distance target_vec = self[word] metric",
"best = np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:, col], exclude=word) ans[word]",
"WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab = vocab if not saveMemory: self.vectors",
"vector) best = metric.argsort()[:n + 1] return self.generate_response(best, metric, exclude=word) def analogy(self, pos,",
"neighbors to return Returns ------- dict: with the n similar words and its",
"generate_response(self, indexes, metric, exclude=''): ''' Generates a response as a list of tuples",
"same, really just use `self.cosine` This is just available for testing. Requires: `__init__(...,",
"words : string or list of string word(s) in the vocabulary to calculate",
"in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:, col],",
"as a list of tuples Example ------- >>> model.cosine('black', n=2) ``` ``` {'black':",
"neg : list Returns ------- List of tuples, each tuple is (word, similarity)",
"Example ------- `king - man + woman = queen` will be: `pos=['king', 'woman'],",
"''' if isinstance(words, basestring): words = [words] targets = np.vstack((self.get_vector(word) for word in",
"mean = [] for word, direction in pos + neg: mean.append(direction * unitvec(self.get_vector(word)))",
"a response as a list of tuples based on the indexes Each tuple",
"scipy.spatial import distance target_vec = self[word] metric = np.empty(self.vocab.shape) for idx, vector in",
"------- List of tuples, each tuple is (word, similarity) Example ------- `king -",
"pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean)",
"enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:, col], exclude=word)",
"distance using scipy.distance.cosine Note: This method is **a lot** slower than `self.cosine` and",
"similarity) Example ------- `king - man + woman = queen` will be: `pos=['king',",
"KeyError('Word not in vocabulary') else: return temp[0] def get_vector(self, word): ''' Returns the",
"best = metric.argsort()[:n + 1] return self.generate_response(best, metric, exclude=word) def analogy(self, pos, neg,",
"unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words)",
"exclude = [exclude] return [(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if",
"get_vector(self, word): ''' Returns the (l2norm) vector for `word` in the vocabulary '''",
"__getitem__(self, word): return self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates a response",
"sim in zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def cosine(self, words, n=10):",
"queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words = pos + neg pos",
"mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words) -",
"= [words] targets = np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm, targets.T)",
"10) number of neighbors to return ''' from scipy.spatial import distance target_vec =",
"`word` in the vocabulary ''' idx = self.ix(word) return self.l2norm[idx] def __getitem__(self, word):",
"def analogy(self, pos, neg, n=10): ''' Analogy similarity. Parameters ---------- pos : list",
"np.vstack(unitvec(vec) for vec in vectors) def ix(self, word): ''' Returns the index on",
"= [(word, 1.0) for word in pos] neg = [(word, -1.0) for word",
"Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude = [exclude] return",
"for word in pos] neg = [(word, -1.0) for word in neg] mean",
"in the vocabulary to calculate the vectors n : int, optional (default 10)",
"for `word` in the vocabulary ''' idx = self.ix(word) return self.l2norm[idx] def __getitem__(self,",
"list Returns ------- List of tuples, each tuple is (word, similarity) Example -------",
"enumerate(self.vectors): metric[idx] = distance.cosine(target_vec, vector) best = metric.argsort()[:n + 1] return self.generate_response(best, metric,",
"is just available for testing. Requires: `__init__(..., saveMemory=False)` Parameters ---------- word : string",
"Parameters ---------- pos : list neg : list Returns ------- List of tuples,",
"= vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def ix(self, word): '''",
"self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates a response as a list",
"word): ''' Returns the index on self.vocab and self.l2norm for `word` ''' temp",
"of tuples Example ------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow',",
"vocabulary to calculate the vectors n : int, optional (default 10) number of",
"1] return self.generate_response(best, metric, exclude=word) def analogy(self, pos, neg, n=10): ''' Analogy similarity.",
"------- dict: with the n similar words and its similarity as a list",
"indexes Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude = [exclude]",
"for word, direction in pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0)",
"np.dot(self.l2norm, targets.T) ans = {} for col, word in enumerate(words): best = np.argsort(metrics[:,",
": int, optional (default 10) number of neighbors to return ''' from scipy.spatial",
"} ''' if isinstance(words, basestring): words = [words] targets = np.vstack((self.get_vector(word) for word",
"sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def",
"to return ''' from scipy.spatial import distance target_vec = self[word] metric = np.empty(self.vocab.shape)",
"string or list of string word(s) in the vocabulary to calculate the vectors",
"Returns the (l2norm) vector for `word` in the vocabulary ''' idx = self.ix(word)",
"of the vectors Parameters ---------- words : string or list of string word(s)",
">>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } ''' if",
"self.l2norm = np.vstack(unitvec(vec) for vec in vectors) def ix(self, word): ''' Returns the",
": string word in the vocabulary to calculate the vectors n : int,",
"and self.l2norm for `word` ''' temp = np.where(self.vocab == word)[0] if temp.size ==",
"in vocabulary') else: return temp[0] def get_vector(self, word): ''' Returns the (l2norm) vector",
"its similarity as a list of tuples Example ------- >>> model.cosine('black', n=2) ```",
"isinstance(words, basestring): words = [words] targets = np.vstack((self.get_vector(word) for word in words)) metrics",
"in the vocabulary ''' idx = self.ix(word) return self.l2norm[idx] def __getitem__(self, word): return",
"from scipy.spatial import distance target_vec = self[word] metric = np.empty(self.vocab.shape) for idx, vector",
"use `self.cosine` This is just available for testing. Requires: `__init__(..., saveMemory=False)` Parameters ----------",
"neg] mean = [] for word, direction in pos + neg: mean.append(direction *",
"tuples based on the indexes Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude,",
"of tuples based on the indexes Each tuple is: (vocab[i], metric[i]) ''' if",
"ans = {} for col, word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n +",
"def generate_response(self, indexes, metric, exclude=''): ''' Generates a response as a list of",
"pos] neg = [(word, -1.0) for word in neg] mean = [] for",
"Example ------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] }",
"model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } ''' if isinstance(words,",
"man + woman = queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words =",
"coding: utf-8 import numpy as np from word2vec.utils import unitvec class WordVectors(object): def",
"calculate the vectors n : int, optional (default 10) number of neighbors to",
"return [(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if word not in",
"vocabulary') else: return temp[0] def get_vector(self, word): ''' Returns the (l2norm) vector for",
"np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:, col], exclude=word) ans[word] = best",
"neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best =",
"<filename>word2vec/wordvectors.py<gh_stars>1-10 # coding: utf-8 import numpy as np from word2vec.utils import unitvec class",
"tuple is (word, similarity) Example ------- `king - man + woman = queen`",
"tuples, each tuple is (word, similarity) Example ------- `king - man + woman",
"words)) metrics = np.dot(self.l2norm, targets.T) ans = {} for col, word in enumerate(words):",
"neighbors to return ''' from scipy.spatial import distance target_vec = self[word] metric =",
"Returns the index on self.vocab and self.l2norm for `word` ''' temp = np.where(self.vocab",
"with the n similar words and its similarity as a list of tuples",
"just use `self.cosine` This is just available for testing. Requires: `__init__(..., saveMemory=False)` Parameters",
"__init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab = vocab if not saveMemory: self.vectors = vectors",
"the n similar words and its similarity as a list of tuples Example",
"+ neg pos = [(word, 1.0) for word in pos] neg = [(word,",
"string word(s) in the vocabulary to calculate the vectors n : int, optional",
"np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm, targets.T) ans = {} for",
"a list of tuples Example ------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white',",
"0: raise KeyError('Word not in vocabulary') else: return temp[0] def get_vector(self, word): '''",
"be: `pos=['king', 'woman'], neg=['man']` ''' words = pos + neg pos = [(word,",
"saveMemory=True): self.vocab = vocab if not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec)",
"available for testing. Requires: `__init__(..., saveMemory=False)` Parameters ---------- word : string word in",
"word)[0] if temp.size == 0: raise KeyError('Word not in vocabulary') else: return temp[0]",
"scipy.distance.cosine Note: This method is **a lot** slower than `self.cosine` and results are",
"n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } ''' if isinstance(words, basestring):",
"= best return ans def _cosine(self, word, n=10): ''' Cosine distance using scipy.distance.cosine",
"list of string word(s) in the vocabulary to calculate the vectors n :",
"np.where(self.vocab == word)[0] if temp.size == 0: raise KeyError('Word not in vocabulary') else:",
"def _cosine(self, word, n=10): ''' Cosine distance using scipy.distance.cosine Note: This method is",
"pos : list neg : list Returns ------- List of tuples, each tuple",
"= metric.argsort()[:n + 1] return self.generate_response(best, metric, exclude=word) def analogy(self, pos, neg, n=10):",
"neg = [(word, -1.0) for word in neg] mean = [] for word,",
"self.vocab and self.l2norm for `word` ''' temp = np.where(self.vocab == word)[0] if temp.size",
"the vocabulary to calculate the vectors n : int, optional (default 10) number",
"n : int, optional (default 10) number of neighbors to return Returns -------",
"metrics[:, col], exclude=word) ans[word] = best return ans def _cosine(self, word, n=10): '''",
"return ''' from scipy.spatial import distance target_vec = self[word] metric = np.empty(self.vocab.shape) for",
"metrics = np.dot(self.l2norm, targets.T) ans = {} for col, word in enumerate(words): best",
"word not in exclude] def cosine(self, words, n=10): ''' Cosine similarity. metric =",
"word in neg] mean = [] for word, direction in pos + neg:",
"== word)[0] if temp.size == 0: raise KeyError('Word not in vocabulary') else: return",
"vectors) def ix(self, word): ''' Returns the index on self.vocab and self.l2norm for",
"''' idx = self.ix(word) return self.l2norm[idx] def __getitem__(self, word): return self.get_vector(word) def generate_response(self,",
"vectors=None, saveMemory=True): self.vocab = vocab if not saveMemory: self.vectors = vectors self.l2norm =",
"self[word] metric = np.empty(self.vocab.shape) for idx, vector in enumerate(self.vectors): metric[idx] = distance.cosine(target_vec, vector)",
"is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude = [exclude] return [(word, sim)",
"``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } ''' if isinstance(words, basestring): words =",
"words = pos + neg pos = [(word, 1.0) for word in pos]",
"metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the vectors Parameters ----------",
"Returns ------- dict: with the n similar words and its similarity as a",
"ix(self, word): ''' Returns the index on self.vocab and self.l2norm for `word` '''",
"and its similarity as a list of tuples Example ------- >>> model.cosine('black', n=2)",
"tuples Example ------- >>> model.cosine('black', n=2) ``` ``` {'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)]",
"as a list of tuples based on the indexes Each tuple is: (vocab[i],",
"[] for word, direction in pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean =",
"list of tuples based on the indexes Each tuple is: (vocab[i], metric[i]) '''",
"---------- pos : list neg : list Returns ------- List of tuples, each",
"if word not in exclude] def cosine(self, words, n=10): ''' Cosine similarity. metric",
"using scipy.distance.cosine Note: This method is **a lot** slower than `self.cosine` and results",
"= [exclude] return [(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes]) if word",
"= np.argsort(metrics[:, col])[::-1][:n + 1] best = self.generate_response(best, metrics[:, col], exclude=word) ans[word] =",
"n=10): ''' Analogy similarity. Parameters ---------- pos : list neg : list Returns",
"list neg : list Returns ------- List of tuples, each tuple is (word,",
"''' from scipy.spatial import distance target_vec = self[word] metric = np.empty(self.vocab.shape) for idx,",
"(default 10) number of neighbors to return Returns ------- dict: with the n",
"return self.generate_response(best, metric, exclude=word) def analogy(self, pos, neg, n=10): ''' Analogy similarity. Parameters",
"self.l2norm for `word` ''' temp = np.where(self.vocab == word)[0] if temp.size == 0:",
"not in exclude] def cosine(self, words, n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors,",
"col], exclude=word) ans[word] = best return ans def _cosine(self, word, n=10): ''' Cosine",
"for word in words)) metrics = np.dot(self.l2norm, targets.T) ans = {} for col,",
"targets = np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm, targets.T) ans =",
"temp.size == 0: raise KeyError('Word not in vocabulary') else: return temp[0] def get_vector(self,",
"[words] targets = np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm, targets.T) ans",
"similarity. Parameters ---------- pos : list neg : list Returns ------- List of",
"[(word, 1.0) for word in pos] neg = [(word, -1.0) for word in",
"vocab=None, vectors=None, saveMemory=True): self.vocab = vocab if not saveMemory: self.vectors = vectors self.l2norm",
"is (word, similarity) Example ------- `king - man + woman = queen` will",
"dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of the vectors Parameters ---------- words :",
"string word in the vocabulary to calculate the vectors n : int, optional",
"metric, exclude=''): ''' Generates a response as a list of tuples based on",
"{'black': [('white', 0.94757425919916516), ('yellow', 0.94640807944950878)] } ''' if isinstance(words, basestring): words = [words]",
"in pos + neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm,",
"Parameters ---------- words : string or list of string word(s) in the vocabulary",
"self.vocab = vocab if not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for",
"method is **a lot** slower than `self.cosine` and results are the almost the",
"woman = queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words = pos +",
"similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words) - 1] return self.generate_response(best,",
"pos, neg, n=10): ''' Analogy similarity. Parameters ---------- pos : list neg :",
"neg=['man']` ''' words = pos + neg pos = [(word, 1.0) for word",
"will be: `pos=['king', 'woman'], neg=['man']` ''' words = pos + neg pos =",
"- man + woman = queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words",
"words = [words] targets = np.vstack((self.get_vector(word) for word in words)) metrics = np.dot(self.l2norm,",
"words, n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm",
"np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n + len(words) - 1] return self.generate_response(best, similarities, exclude=words)",
"vectors n : int, optional (default 10) number of neighbors to return Returns",
"to return Returns ------- dict: with the n similar words and its similarity",
"vector in enumerate(self.vectors): metric[idx] = distance.cosine(target_vec, vector) best = metric.argsort()[:n + 1] return",
"(word, similarity) Example ------- `king - man + woman = queen` will be:",
"basestring): exclude = [exclude] return [(word, sim) for word, sim in zip(self.vocab[indexes], metric[indexes])",
"for word in neg] mean = [] for word, direction in pos +",
"of neighbors to return Returns ------- dict: with the n similar words and",
"in pos] neg = [(word, -1.0) for word in neg] mean = []",
"exclude] def cosine(self, words, n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses",
"in zip(self.vocab[indexes], metric[indexes]) if word not in exclude] def cosine(self, words, n=10): '''",
"return temp[0] def get_vector(self, word): ''' Returns the (l2norm) vector for `word` in",
"`self.cosine` and results are the almost the same, really just use `self.cosine` This",
"are the almost the same, really just use `self.cosine` This is just available",
"Generates a response as a list of tuples based on the indexes Each",
"**a lot** slower than `self.cosine` and results are the almost the same, really",
"{} for col, word in enumerate(words): best = np.argsort(metrics[:, col])[::-1][:n + 1] best",
"metric[indexes]) if word not in exclude] def cosine(self, words, n=10): ''' Cosine similarity.",
"vectors Parameters ---------- words : string or list of string word(s) in the",
"ans def _cosine(self, word, n=10): ''' Cosine distance using scipy.distance.cosine Note: This method",
"self.l2norm[idx] def __getitem__(self, word): return self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates",
"similarity as a list of tuples Example ------- >>> model.cosine('black', n=2) ``` ```",
"if not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec in vectors)",
": int, optional (default 10) number of neighbors to return Returns ------- dict:",
"('yellow', 0.94640807944950878)] } ''' if isinstance(words, basestring): words = [words] targets = np.vstack((self.get_vector(word)",
"np from word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab",
"mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best = similarities.argsort()[::-1][:n",
"''' temp = np.where(self.vocab == word)[0] if temp.size == 0: raise KeyError('Word not",
"pos + neg pos = [(word, 1.0) for word in pos] neg =",
"on the indexes Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring): exclude",
"of string word(s) in the vocabulary to calculate the vectors n : int,",
"lot** slower than `self.cosine` and results are the almost the same, really just",
"numpy as np from word2vec.utils import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None,",
"import unitvec class WordVectors(object): def __init__(self, vocab=None, vectors=None, saveMemory=True): self.vocab = vocab if",
"number of neighbors to return ''' from scipy.spatial import distance target_vec = self[word]",
"(default 10) number of neighbors to return ''' from scipy.spatial import distance target_vec",
"vectors n : int, optional (default 10) number of neighbors to return '''",
"n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed l2norm of",
"l2norm_of_target_vector) Uses a precomputed l2norm of the vectors Parameters ---------- words : string",
"= queen` will be: `pos=['king', 'woman'], neg=['man']` ''' words = pos + neg",
"+ neg: mean.append(direction * unitvec(self.get_vector(word))) mean = np.array(mean).mean(axis=0) similarities = np.dot(self.l2norm, mean) best",
"vec in vectors) def ix(self, word): ''' Returns the index on self.vocab and",
"np.empty(self.vocab.shape) for idx, vector in enumerate(self.vectors): metric[idx] = distance.cosine(target_vec, vector) best = metric.argsort()[:n",
"def cosine(self, words, n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a",
"= vocab if not saveMemory: self.vectors = vectors self.l2norm = np.vstack(unitvec(vec) for vec",
"word): return self.get_vector(word) def generate_response(self, indexes, metric, exclude=''): ''' Generates a response as",
"temp[0] def get_vector(self, word): ''' Returns the (l2norm) vector for `word` in the",
"0.94640807944950878)] } ''' if isinstance(words, basestring): words = [words] targets = np.vstack((self.get_vector(word) for",
"based on the indexes Each tuple is: (vocab[i], metric[i]) ''' if isinstance(exclude, basestring):",
"cosine(self, words, n=10): ''' Cosine similarity. metric = dot(l2norm_of_vectors, l2norm_of_target_vector) Uses a precomputed",
"[(word, -1.0) for word in neg] mean = [] for word, direction in",
"almost the same, really just use `self.cosine` This is just available for testing."
] |
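The fragments in the row above reassemble into `word2vec/wordvectors.py`: a `WordVectors` class that l2-normalizes all vectors once at construction and then answers `cosine()` and `analogy()` queries with a single dot product against that matrix, keeping a slow per-word scipy `_cosine()` only for testing. The standalone sketch below shows just that core technique; the toy vocabulary, random vectors, and dimensions are made up for illustration, and only the dot-product-over-normalized-rows logic comes from the quoted fragments.

```python
# Minimal sketch of the similarity machinery described by the fragments above.
# Toy data throughout: only the "precompute l2norm, rank by dot product" idea
# is taken from the quoted source.
import numpy as np

def unitvec(vec):
    # l2-normalize one vector (the role word2vec.utils.unitvec plays above)
    return vec / np.linalg.norm(vec)

vocab = np.array(['king', 'queen', 'man', 'woman', 'black', 'white'])
vectors = np.random.default_rng(0).normal(size=(len(vocab), 8))  # toy embeddings
l2norm = np.vstack([unitvec(v) for v in vectors])  # precomputed, as in __init__

def cosine(word, n=3):
    # metric = dot(l2norm_of_vectors, l2norm_of_target_vector)
    target = l2norm[np.where(vocab == word)[0][0]]
    metric = np.dot(l2norm, target)
    best = metric.argsort()[::-1][:n + 1]  # descending similarity
    return [(w, s) for w, s in zip(vocab[best], metric[best]) if w != word]

def analogy(pos, neg, n=3):
    # king - man + woman ~ queen: mean of the signed, normalized word vectors
    signed = [(w, 1.0) for w in pos] + [(w, -1.0) for w in neg]
    mean = np.mean([d * l2norm[np.where(vocab == w)[0][0]] for w, d in signed], axis=0)
    sims = np.dot(l2norm, mean)
    best = sims.argsort()[::-1][:n + len(signed)]
    exclude = set(pos) | set(neg)
    return [(w, s) for w, s in zip(vocab[best], sims[best]) if w not in exclude]

print(cosine('black'))                             # n nearest neighbors
print(analogy(pos=['king', 'woman'], neg=['man']))
```

Precomputing the normalized matrix is the point of the design: every query becomes one matrix-vector product, which is why the fragments warn that the element-by-element scipy loop in `_cosine` is "**a lot** slower" and kept only for testing.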
[
"<reponame>TomPretty/calculator from calculator import evaluate def test_evaluating_a_constant_expression(): source = \"3\" assert evaluate(source) ==",
"from calculator import evaluate def test_evaluating_a_constant_expression(): source = \"3\" assert evaluate(source) == 3"
] |
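The two fragments in the row above quote a single pytest case from `TomPretty/calculator`: `evaluate("3")` must return `3`. The calculator module itself is not part of the excerpt, so the stand-in below is purely hypothetical and covers only the one behaviour the quoted test asserts.

```python
# Hypothetical stand-in for the calculator module quoted above; the real
# evaluate() is not in the excerpt. A constant expression parses as an int.
def evaluate(source: str) -> int:
    return int(source.strip())

def test_evaluating_a_constant_expression():
    source = "3"
    assert evaluate(source) == 3

test_evaluating_a_constant_expression()  # passes silently
```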
[
"jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except",
"jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res ==",
"with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext = None",
"jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->' not",
"shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup)",
"as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async",
"await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage( from_users=chat_id,",
"res = bytes.decode(convdata.data) if res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel()",
"encoding='utf-8') as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut,",
"', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e:",
"'选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}')",
"if res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id,",
"await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if",
"await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception",
"markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata",
"f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for",
"'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext =",
"with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in",
"f: shortcuts = f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as",
"open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup",
"SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = [] SENDER",
"await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res",
"= [] SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r',",
"if '-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup)",
"@jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后')",
"except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if",
"shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e:",
"= await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel()",
"[Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->' not in shortcut] markup =",
"= None async with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1]))",
"from telethon import events, Button from .utils import split_list, press_event, cmd from asyncio",
"f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage( from_users=chat_id, pattern=BOT_SET['命令别名']['a'])) jdbot.add_event_handler(my_b,",
"if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止')",
"jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext",
"try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split(",
"wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = [] msg = await",
"Button from .utils import split_list, press_event, cmd from asyncio import exceptions from ..",
"except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id,",
"await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something",
"= event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f:",
"import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup",
"markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await",
"msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m",
"sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = [] msg = await jdbot.send_message(chat_id,",
"await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->'",
"'请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel':",
"= f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as conv: markup",
"'-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except",
"wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id,",
"from .utils import split_list, press_event, cmd from asyncio import exceptions from .. import",
"markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res",
"await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel': msg = await jdbot.edit_message(msg,",
"= split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata =",
"wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage( from_users=chat_id, pattern=BOT_SET['命令别名']['a'])) jdbot.add_event_handler(my_b, events.NewMessage(from_users=chat_id,",
"sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage( from_users=chat_id, pattern=BOT_SET['命令别名']['a'])) jdbot.add_event_handler(my_b, events.NewMessage(from_users=chat_id, pattern=BOT_SET['命令别名']['b']))",
"BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = [] SENDER = event.sender_id",
"with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg)",
"msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts =",
".utils import split_list, press_event, cmd from asyncio import exceptions from .. import jdbot,",
"= res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg =",
"pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await",
"jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup",
"f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts",
"cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as",
"await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try:",
"async def my_b(event): markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE,",
"try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}')",
"from asyncio import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name",
"import split_list, press_event, cmd from asyncio import exceptions from .. import jdbot, chat_id,",
"convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel': msg =",
"msg) markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->' not in",
"markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->' not in shortcut]",
"press_event, cmd from asyncio import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger,",
"import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$'))",
"await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event):",
"conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg,",
"shortcuts if '-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg =",
"wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception",
"'正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext =",
"in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as",
"cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except",
"'')) except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await",
"jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: await",
"asyncio import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id,",
"'对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext: await",
"my_a(event): markup = [] SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with",
"logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = [] msg =",
"markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as",
"jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage( from_users=chat_id, pattern=BOT_SET['命令别名']['a']))",
"async def my_a(event): markup = [] SENDER = event.sender_id msg = await jdbot.send_message(chat_id,",
".. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event):",
"exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something",
"jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup =",
"for shortcut in shortcuts if '-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数']))",
"pattern=r'^/a$')) async def my_a(event): markup = [] SENDER = event.sender_id msg = await",
"f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as conv: markup =",
"shortcut in shortcuts if '-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await",
"cmd from asyncio import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET,",
"as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->'",
"wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = []",
"ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = [] SENDER = event.sender_id msg",
"logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except",
"jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts",
"for shortcut in shortcuts if '-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消',",
"conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup",
"Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$'))",
"= [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in shortcut] markup",
"my_b(event): markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8')",
"[] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts",
"await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', ''))",
"jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something",
"logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = [] SENDER =",
"buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel': msg",
"events, Button from .utils import split_list, press_event, cmd from asyncio import exceptions from",
"pattern=r'^/b$')) async def my_b(event): markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with",
"single_use=True) for shortcut in shortcuts if '-->' not in shortcut] markup = split_list(markup,",
"shortcut in shortcuts if '-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')])",
"split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something",
"as f: shortcuts = f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60)",
"markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in shortcut]",
"= await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if",
"open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext = None async",
"jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m",
"msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data)",
"int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m",
"'r', encoding='utf-8') as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup =",
"shortcuts = f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as conv:",
"None async with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for",
"async with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut",
"'-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in shortcut] markup = split_list(markup,",
"'请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m",
"async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id,",
"data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER)) res =",
"def my_a(event): markup = [] SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后')",
"markup = [] SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE,",
"else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ',",
"import events, Button from .utils import split_list, press_event, cmd from asyncio import exceptions",
"conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in",
"split_list, press_event, cmd from asyncio import exceptions from .. import jdbot, chat_id, SHORTCUT_FILE,",
"Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$'))",
"await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event):",
"f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await",
"res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg = await",
"jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try:",
"= f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut in",
"in shortcuts if '-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg",
"e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def",
"split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await",
"def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something",
"Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name:",
"cmdtext = None async with jdbot.conversation(SENDER, timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0],",
"sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as",
"e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def",
"exceptions from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async",
"def my_b(event): markup = [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r',",
"3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:', buttons=markup) convdata = await conv.wait_event(press_event(SENDER))",
"except exceptions.TimeoutError: msg = await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg,",
"await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}')",
"jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res conv.cancel() if cmdtext:",
"= await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines()",
"encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext = None async with jdbot.conversation(SENDER,",
"'已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}')",
"= split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception as e: await jdbot.edit_message(msg,",
"= bytes.decode(convdata.data) if res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else:",
"msg) cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError:",
"= [] msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f:",
"chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = []",
"telethon import events, Button from .utils import split_list, press_event, cmd from asyncio import",
"jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m",
"try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut in shortcuts if",
"'-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg,",
"timeout=60) as conv: markup = [Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if",
"res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg)",
"SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as",
"'r', encoding='utf-8') as f: shortcuts = f.readlines() try: cmdtext = None async with",
"[] SENDER = event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8')",
"in shortcuts if '-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id,",
"not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:', buttons=markup) except Exception",
"data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in shortcut] markup = split_list(markup, 3)",
"== 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext",
"= await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}')",
"<reponame>dvdrm/gd from telethon import events, Button from .utils import split_list, press_event, cmd from",
"[Button.inline(shortcut.split( '-->')[0], data=str(shortcut.split('-->')[-1])) for shortcut in shortcuts if '-->' in shortcut] markup =",
"= await conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel': msg = await",
"as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async",
"@jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e:",
"cmdtext = res conv.cancel() if cmdtext: await cmd(cmdtext.replace('nohup ', '')) except exceptions.TimeoutError: msg",
"conv.wait_event(press_event(SENDER)) res = bytes.decode(convdata.data) if res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消')",
"sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/clearboard$')) async def my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear())",
"as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True)",
"from .. import jdbot, chat_id, SHORTCUT_FILE, logger, BOT_SET, ch_name @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def",
"my_clear(event): try: await jdbot.send_message(chat_id, '已清空您的keyboard',buttons=Button.clear()) except Exception as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m",
"sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup = [] msg",
"shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id, msg) markup = [Button.text(shortcut, single_use=True) for shortcut",
"msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await jdbot.delete_messages(chat_id, msg) cmdtext = res",
"buttons=markup) except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}')",
"'正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts = f.readlines() try: await jdbot.delete_messages(chat_id,",
"as e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a,",
"@jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/a$')) async def my_a(event): markup = [] SENDER = event.sender_id msg =",
"bytes.decode(convdata.data) if res == 'cancel': msg = await jdbot.edit_message(msg, '对话已取消') conv.cancel() else: await",
"await jdbot.edit_message(msg, '选择已超时,对话已停止') except Exception as e: await jdbot.edit_message(msg, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something",
"f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') @jdbot.on(events.NewMessage(from_users=chat_id, pattern=r'^/b$')) async def my_b(event): markup =",
"in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await jdbot.edit_message(msg, '请做出您的选择:',",
"e: await jdbot.send_message(chat_id, f'something wrong,I\\'m sorry\\n{str(e)}') logger.error(f'something wrong,I\\'m sorry\\n{str(e)}') if ch_name: jdbot.add_event_handler(my_a, events.NewMessage(",
"= [Button.text(shortcut, single_use=True) for shortcut in shortcuts if '-->' not in shortcut] markup",
"event.sender_id msg = await jdbot.send_message(chat_id, '正在查询您的常用命令,请稍后') with open(SHORTCUT_FILE, 'r', encoding='utf-8') as f: shortcuts",
"if '-->' in shortcut] markup = split_list(markup, 3) markup.append([Button.inline('取消', data='cancel')]) msg = await",
"shortcuts if '-->' not in shortcut] markup = split_list(markup, int(BOT_SET['每页列数'])) await jdbot.send_message(chat_id, '请做出您的选择:',"
] |
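The `split_list` and `press_event` helpers imported from `.utils` are not shown in this file. The following is a minimal sketch of what they plausibly do, inferred only from how they are called above, so both bodies here are assumptions rather than the repository's actual implementation.

from telethon import events

def split_list(items, n):
    # Assumed implementation: chunk a flat list of buttons into rows of at
    # most n columns, the nested-list layout telethon expects for `buttons=`.
    return [items[i:i + n] for i in range(0, len(items), n)]

def press_event(user_id):
    # Assumed implementation: a CallbackQuery filter that only fires for
    # inline-button presses coming from the given user, so the conversation
    # above can await that user's choice.
    return events.CallbackQuery(func=lambda e: e.sender_id == user_id)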
<gh_stars>0
#!/usr/bin/env __PYTHONVER__
import psutil
import time
import sys

if __name__ == "__main__":
    while True:
        try:
            # Get the current unix epoch time
            now = str(int(time.time() / 1))
            # Output needs to be in comma format, eg:
            # epoch,checkname,value,metadata
            # Eg:
            # 1430157858,os.nic.bytes.wlan0_out,32863319809,metric=bps;title=wlan0_out
            # 1430157858,os.nic.bytes.wlan0_in,6320095757,metric=bps;title=wlan0_in
            # 1430157858,os.nic.bytes.em1_out,4128073428,metric=bps;title=em1_out
            # 1430157858,os.nic.bytes.em1_in,3156351939,metric=bps;title=em1_in
            # List of network devices we want to exclude
            filternics = ['lo']
            nics = psutil.net_io_counters(pernic=True)
            for nic in nics:
                (bytes_sent, bytes_recv, packets_sent, packets_recv,
                 errin, errout, dropin, dropout) = nics[nic]
                if nic not in filternics:
                    # Ignore inactive network interfaces
                    if packets_recv != 0 and packets_sent != 0:
                        print(now + ",os.nic.bytes." + nic + "_out," + str(bytes_sent) + ",metric=bps;function=derivative;inversion=-1;title=" + nic + "_out")
                        print(now + ",os.nic.bytes." + nic + "_in," + str(bytes_recv) + ",metric=bps;function=derivative;inversion=1;title=" + nic + "_in")
                        print(now + ",os.nic.packets." + nic + "_sent," + str(packets_sent) + ",metric=pps;function=derivative;inversion=-1;title=" + nic + "_sent")
                        print(now + ",os.nic.packets." + nic + "_recv," + str(packets_recv) + ",metric=pps;function=derivative;inversion=1;title=" + nic + "_recv")
            sys.stdout.flush()
            time.sleep(1)
        except:
            pass
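Each emitted line follows the `epoch,checkname,value,metadata` format described in the script's comments. As a rough illustration (not part of the original script), a consumer could split a line back into its fields like this; the sample line is taken from the comments above:

# Hypothetical consumer-side parsing of one emitted metric line.
line = "1430157858,os.nic.bytes.wlan0_out,32863319809,metric=bps;title=wlan0_out"
epoch, checkname, value, metadata = line.split(",", 3)
meta = dict(kv.split("=", 1) for kv in metadata.split(";"))
print(checkname, int(value), meta["metric"])  # os.nic.bytes.wlan0_out 32863319809 bps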
<reponame>Hraesvel/portfolioSite_flutter
import json
import os

import boto3
from botocore.exceptions import ClientError
# from email.mime.multipart import MIMEMultipart
# from email.mime.text import MIMEText
# from email.mime.application import MIMEApplication

region = os.environ['Region']


def send_mail(msg):
    client_ses = boto3.client('ses', region)
    try:
        # Verify the visitor's reply address with SES before sending.
        verify = client_ses.verify_email_address(EmailAddress=msg['reply_address'])
        response = client_ses.send_email(
            Source='<EMAIL>',
            Destination={
                'ToAddresses': [],
                'CcAddresses': [],
                'BccAddresses': []
            },
            Message={
                'Subject': {'Data': f"from {msg['reply_address']}"},
                'Body': {'Text': {'Data': msg['body']}}},
            ReplyToAddresses=[msg['reply_address']],
        )
    except ClientError as e:
        output = e.response['Error']['Message']
    else:
        output = "Email sent! Message ID: " + response['MessageId']
    return output


def lambda_handler(event, context):
    # print(event)
    # print(event['reply_address'])
    print(send_mail(event))
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
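The handler passes the invoking event straight into `send_mail`, so the event must carry the contact-form fields that function reads. A hypothetical test payload, inferred only from those key accesses and not documented in the original source, might look like:

# Hypothetical event shape, assumed from the keys send_mail() reads.
test_event = {
    "reply_address": "visitor@example.com",
    "body": "Hello from the portfolio contact form.",
}
# lambda_handler(test_event, None) would then email this body with the
# visitor's address set as the reply-to.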
import torch
import time
import torch.nn as nn


class CNN(nn.Module):
    def __init__(self):
        super(CNN, self).__init__()
        # Two conv blocks, each halving spatial size via 2x2 max pooling.
        self.conv1 = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        self.conv2 = nn.Sequential(
            nn.Conv2d(in_channels=16, out_channels=32, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        )
        # 32 * 7 * 7 matches a 1x28x28 input (e.g. MNIST) after two 2x pools.
        self.out = nn.Linear(in_features=32 * 7 * 7, out_features=10)

    def forward(self, x):
        x = self.conv1(x)
        x = self.conv2(x)
        x = x.view(x.size(0), -1)
        output = self.out(x)
        # Returns the logits together with the flattened feature vector.
        return output, x


class AlexNet(nn.Module):
    def __init__(self):
        super(AlexNet, self).__init__()
        self.conv_layers = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=96, kernel_size=3, stride=2, padding=5),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, stride=1, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(in_channels=256, out_channels=384, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=384, out_channels=384, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels=384, out_channels=256, kernel_size=3, stride=1, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        # 256 * 1 * 1 likewise corresponds to a 1x28x28 input after the
        # strided conv and three pooling stages above.
        self.fc_layers = nn.Sequential(
            nn.Linear(256 * 1 * 1, 4096),
            nn.Dropout(0.5),
            nn.Linear(4096, 4096),
            nn.Dropout(0.5),
            nn.Linear(4096, 10),
        )

    def forward(self, x):
        x = self.conv_layers(x)
        x = x.view(x.size(0), -1)
        output = self.fc_layers(x)
        return output, x
"forward(self, x): x = self.conv1(x) x = self.conv2(x) x = x.view(x.size(0), -1) output",
"__init__(self): super(CNN, self).__init__() self.conv1 = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2),",
"= nn.Sequential( nn.Conv2d(in_channels=1, out_channels=96, kernel_size=3, stride=2, padding=5), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2), nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5,",
"self.conv1 = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2), ) self.conv2 =",
"out_channels=16, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2), ) self.conv2 = nn.Sequential( nn.Conv2d(in_channels=16, out_channels=32, kernel_size=5,",
"nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2), nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=3, stride=2), nn.Conv2d(in_channels=256, out_channels=384,",
"out_channels=32, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2), ) self.out = nn.Linear(in_features=32 * 7 *",
"super(AlexNet, self).__init__() self.conv_layers = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=96, kernel_size=3, stride=2, padding=5), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2),",
"kernel_size=3, stride=2, padding=5), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2, stride=2), nn.Conv2d(in_channels=96, out_channels=256, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=3,",
"1, 4096), nn.Dropout(0.5), nn.Linear(4096, 4096), nn.Dropout(0.5), nn.Linear(4096, 10), ) def forward(self, x): x",
"def __init__(self): super(CNN, self).__init__() self.conv1 = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True),",
"super(CNN, self).__init__() self.conv1 = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=16, kernel_size=5, stride=1, padding=2), nn.ReLU(inplace=True), nn.MaxPool2d(kernel_size=2), )"
] |
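A minimal smoke test for the two models above, assuming MNIST-sized 1x28x28 inputs (an assumption, but the one consistent with the 32 * 7 * 7 and 256 * 1 * 1 flatten sizes):

# sketch: shape-check both models on a dummy batch (assumes 1x28x28 inputs)
# CNN:     28 -> 14 -> 7 through the two MaxPool2d(kernel_size=2) layers
# AlexNet: 28 -> 18 -> 9 -> 4 -> 1 through the strided convs and pools
dummy = torch.randn(4, 1, 28, 28)        # batch of 4 single-channel images

logits, features = CNN()(dummy)
print(logits.shape, features.shape)      # [4, 10] and [4, 1568] == 32 * 7 * 7

out = AlexNet()(dummy)
print(out.shape)                         # [4, 10]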
import sys
from contextlib import contextmanager
from os import path
import importlib


# https://stackoverflow.com/a/41904558/2281355
@contextmanager
def add_to_path(p):
    import sys
    old_path = sys.path
    old_modules = sys.modules
    sys.modules = old_modules.copy()
    sys.path = sys.path[:]
    sys.path.insert(0, p)
    try:
        yield
    finally:
        sys.path = old_path
        sys.modules = old_modules


def extract_javascript(fileobj, keywords, comment_tags, options):
    # import the original lexer before altering sys.path
    # this way, our mocked tokenizer can still access the original lexer
    # and utilities
    import babel.messages.jslexer
    with add_to_path(path.dirname(__file__)):
        # replace the jslexer
        # first, reload all parent namespace so that it can adapt the new sys.path...
        import babel
        importlib.reload(babel)
        import babel.messages
        importlib.reload(babel.messages)
        # this should load our mocked jslexer
        importlib.reload(babel.messages.jslexer)
        # babel.messages.extract is not changed, so we can use directly
        from babel.messages.extract import extract_javascript
        yield from extract_javascript(fileobj, keywords, comment_tags, options)
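A minimal sketch of driving the wrapper directly; the file name and keyword list here are illustrative placeholders, not from the original:

# hypothetical driver: Babel extractor functions yield
# (lineno, funcname, messages, comments) tuples
with open("app.js", "rb") as fileobj:    # "app.js" is a placeholder path
    for lineno, funcname, messages, comments in extract_javascript(
            fileobj, keywords=["_", "gettext"], comment_tags=["NOTE:"], options={}):
        print(lineno, funcname, messages, comments)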
import psycopg2
from psycopg2 import Error

try:
    # Connect to an existing database
    connection = psycopg2.connect(user="sa",
                                  password="<PASSWORD>",
                                  host="127.0.0.1",
                                  port="5432",
                                  database="soildb")

    # Create a cursor to perform database operations
    cursor = connection.cursor()
    # Print PostgreSQL details
    print("PostgreSQL server information")
    print(connection.get_dsn_parameters(), "\n")
    # Executing a SQL query
    cursor.execute("SELECT version();")
    # Fetch result
    record = cursor.fetchone()
    print("You are connected to - ", record, "\n")

    print("Retrieving soil records...")
    cursor.execute("SELECT * from soil_profile limit 10")
    # Fetch result
    records = cursor.fetchall()
    print(f"soil records = {records}")

except (Exception, Error) as error:
    print("Error while connecting to PostgreSQL", error)
finally:
    if connection:
        cursor.close()
        connection.close()
        print("PostgreSQL connection is closed")
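A minimal variant of the same query, assuming the same soildb credentials, using psycopg2's connection/cursor context managers and a parameterized LIMIT:

# sketch: the "with connection" block commits on success and rolls back on
# error; close() stays explicit because the context manager does not close
import psycopg2

connection = psycopg2.connect(user="sa", password="<PASSWORD>",
                              host="127.0.0.1", port="5432", database="soildb")
try:
    with connection:
        with connection.cursor() as cursor:
            cursor.execute("SELECT * FROM soil_profile LIMIT %s", (10,))
            for row in cursor.fetchall():
                print(row)
finally:
    connection.close()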
# -*- encoding:ascii -*-
from mako import runtime, filters, cache
UNDEFINED = runtime.UNDEFINED
__M_dict_builtin = dict
__M_locals_builtin = locals
_magic_number = 6
_modified_time = 1433361565.2110319
_template_filename='templates/webapps/galaxy/admin/tool_sheds.mako'
_template_uri='/webapps/galaxy/admin/tool_sheds.mako'
_template_cache=cache.Cache(__name__, _modified_time)
_source_encoding='ascii'
_exports = ['stylesheets', 'title']


def _mako_get_namespace(context, name):
    try:
        return context.namespaces[(__name__, name)]
    except KeyError:
        _mako_generate_namespaces(context)
        return context.namespaces[(__name__, name)]


def _mako_generate_namespaces(context):
    # SOURCE LINE 2
    ns = runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri)
    context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns


def _mako_inherit(template, context):
    _mako_generate_namespaces(context)
    return runtime._inherit_from(context, u'/base.mako', _template_uri)


def render_body(context, **pageargs):
    context.caller_stack._push_frame()
    try:
        __M_locals = __M_dict_builtin(pageargs=pageargs)
        _import_ns = {}
        _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg'])
        status = _import_ns.get('status', context.get('status', UNDEFINED))
        h = _import_ns.get('h', context.get('h', UNDEFINED))
        message = _import_ns.get('message', context.get('message', UNDEFINED))
        trans = _import_ns.get('trans', context.get('trans', UNDEFINED))
        render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))
        __M_writer = context.writer()
        # SOURCE LINE 1
        __M_writer(u'\n')
        # SOURCE LINE 2
        __M_writer(u'\n\n')
        # SOURCE LINE 4
        __M_writer(u'\n\n')
        # SOURCE LINE 9
        __M_writer(u'\n\n')
        # SOURCE LINE 11
        if message:
            # SOURCE LINE 12
            __M_writer(u'    ')
            __M_writer(unicode(render_msg( message, status )))
            __M_writer(u'\n')
        pass
        # SOURCE LINE 14
        __M_writer(u'\n<div class="toolForm">\n    <div class="toolFormTitle">Accessible Galaxy tool sheds</div>\n    <div class="toolFormBody">\n        <div class="form-row">\n            <table class="grid">\n                ')
        # SOURCE LINE 20
        shed_id = 0

        __M_locals_builtin_stored = __M_locals_builtin()
        __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored]))
        __M_writer(u'\n')
        # SOURCE LINE 21
        for name, url in trans.app.tool_shed_registry.tool_sheds.items():
            # SOURCE LINE 22
            __M_writer(u'                    <tr class="libraryTitle">\n                        <td>\n                            <div style="float: left; margin-left: 1px;" class="menubutton split popup" id="dataset-')
            # SOURCE LINE 24
            __M_writer(unicode(shed_id))
            __M_writer(u'-popup">\n                                <a class="view-info" href="')
            # SOURCE LINE 25
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url )))
            __M_writer(u'">')
            __M_writer(unicode(name))
            __M_writer(u'</a>\n                            </div>\n                            <div popupmenu="dataset-')
            # SOURCE LINE 27
            __M_writer(unicode(shed_id))
            __M_writer(u'-popup">\n                                <a class="action-button" href="')
            # SOURCE LINE 28
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url )))
            __M_writer(u'">Browse valid repositories</a>\n                                <a class="action-button" href="')
            # SOURCE LINE 29
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url )))
            __M_writer(u'">Search for valid tools</a>\n                                <a class="action-button" href="')
            # SOURCE LINE 30
            __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url )))
            __M_writer(u'">Search for workflows</a>\n                            </div>\n                        </td>\n                    </tr>\n                    ')
            # SOURCE LINE 34
            shed_id += 1

            __M_locals_builtin_stored = __M_locals_builtin()
            __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored]))
            __M_writer(u'\n')
        pass
        # SOURCE LINE 36
        __M_writer(u'            </tr>\n            </table>\n        </div>\n        <div style="clear: both"></div>\n    </div>\n</div>\n')
        return ''
    finally:
        context.caller_stack._pop_frame()


def render_stylesheets(context):
    context.caller_stack._push_frame()
    try:
        _import_ns = {}
        _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg'])
        h = _import_ns.get('h', context.get('h', UNDEFINED))
        parent = _import_ns.get('parent', context.get('parent', UNDEFINED))
        __M_writer = context.writer()
        # SOURCE LINE 6
        __M_writer(u'\n    ')
        # SOURCE LINE 7
        __M_writer(unicode(parent.stylesheets()))
        __M_writer(u'\n    ')
        # SOURCE LINE 8
        __M_writer(unicode(h.css( "library" )))
        __M_writer(u'\n')
        return ''
    finally:
        context.caller_stack._pop_frame()


def render_title(context):
    context.caller_stack._push_frame()
    try:
        _import_ns = {}
        _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg'])
        __M_writer = context.writer()
        # SOURCE LINE 4
        __M_writer(u'Configured Galaxy tool sheds')
        return ''
    finally:
        context.caller_stack._pop_frame()
"left; margin-left: 1px;\" class=\"menubutton split popup\" id=\"dataset-') # SOURCE LINE 24 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n",
"context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return",
"SOURCE LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">') __M_writer(unicode(name)) __M_writer(u'</a>\\n </div>\\n <div",
"context.writer() # SOURCE LINE 4 __M_writer(u'Configured Galaxy tool sheds') return '' finally: context.caller_stack._pop_frame()",
"__M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for",
"__M_writer(unicode(name)) __M_writer(u'</a>\\n </div>\\n <div popupmenu=\"dataset-') # SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\"",
"return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns =",
"__M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed',",
"= _import_ns.get('h', context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() #",
"valid tools</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 30 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url",
"') # SOURCE LINE 20 shed_id = 0 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key])",
"<a class=\"action-button\" href=\"') # SOURCE LINE 28 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse",
"= runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context):",
"u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context,",
"controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE",
"render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status",
"= _import_ns.get('render_msg', context.get('render_msg', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 1 __M_writer(u'\\n') #",
"__M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED)) h",
"ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try:",
"LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') # SOURCE LINE 28 __M_writer(unicode(h.url_for( controller='admin_toolshed',",
"context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def",
"class=\"view-info\" href=\"') # SOURCE LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">') __M_writer(unicode(name))",
"UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 1 __M_writer(u'\\n') # SOURCE LINE 2",
"return '' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns,",
"__M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE LINE 36 __M_writer(u' </tr>\\n </table>\\n </div>\\n",
"__M_writer(u'\\n ') # SOURCE LINE 8 __M_writer(unicode(h.css( \"library\" ))) __M_writer(u'\\n') return '' finally:",
"[u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED)) message =",
"[u'render_msg']) __M_writer = context.writer() # SOURCE LINE 4 __M_writer(u'Configured Galaxy tool sheds') return",
"__M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE",
"SOURCE LINE 34 shed_id += 1 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key",
"in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for name,",
"_import_ns.get('message', context.get('message', UNDEFINED)) trans = _import_ns.get('trans', context.get('trans', UNDEFINED)) render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED))",
"25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">') __M_writer(unicode(name)) __M_writer(u'</a>\\n </div>\\n <div popupmenu=\"dataset-') #",
"') # SOURCE LINE 34 shed_id += 1 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key])",
"__M_writer(u'\\n ') # SOURCE LINE 7 __M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n ') # SOURCE LINE 8",
"27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') # SOURCE LINE 28 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed',",
"__M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for name, url in trans.app.tool_shed_registry.tool_sheds.items():",
"LINE 4 __M_writer(u'\\n\\n') # SOURCE LINE 9 __M_writer(u'\\n\\n') # SOURCE LINE 11 if",
"2 __M_writer(u'\\n\\n') # SOURCE LINE 4 __M_writer(u'\\n\\n') # SOURCE LINE 9 __M_writer(u'\\n\\n') #",
"20 shed_id = 0 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id']",
"))) __M_writer(u'\">Search for valid tools</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 30 __M_writer(unicode(h.url_for(",
"SOURCE LINE 4 __M_writer(u'\\n\\n') # SOURCE LINE 9 __M_writer(u'\\n\\n') # SOURCE LINE 11",
"__M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') #",
"[u'render_msg']) h = _import_ns.get('h', context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer =",
"# SOURCE LINE 12 __M_writer(u' ') __M_writer(unicode(render_msg( message, status ))) __M_writer(u'\\n') pass #",
"context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer() # SOURCE",
"9 __M_writer(u'\\n\\n') # SOURCE LINE 11 if message: # SOURCE LINE 12 __M_writer(u'",
"action='find_tools_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for valid tools</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE",
"action='find_workflows_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') # SOURCE LINE",
"<tr class=\"libraryTitle\">\\n <td>\\n <div style=\"float: left; margin-left: 1px;\" class=\"menubutton split popup\" id=\"dataset-') #",
"<a class=\"view-info\" href=\"') # SOURCE LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">')",
"_mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED))",
"__M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') #",
"= 1433361565.2110319 _template_filename='templates/webapps/galaxy/admin/tool_sheds.mako' _template_uri='/webapps/galaxy/admin/tool_sheds.mako' _template_cache=cache.Cache(__name__, _modified_time) _source_encoding='ascii' _exports = ['stylesheets', 'title'] def _mako_get_namespace(context,",
"sheds</div>\\n <div class=\"toolFormBody\">\\n <div class=\"form-row\">\\n <table class=\"grid\">\\n ') # SOURCE LINE 20 shed_id",
"class=\"libraryTitle\">\\n <td>\\n <div style=\"float: left; margin-left: 1px;\" class=\"menubutton split popup\" id=\"dataset-') # SOURCE",
"LINE 8 __M_writer(unicode(h.css( \"library\" ))) __M_writer(u'\\n') return '' finally: context.caller_stack._pop_frame() def render_title(context): context.caller_stack._push_frame()",
"'__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs):",
"class=\"action-button\" href=\"') # SOURCE LINE 28 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid",
"style=\"clear: both\"></div>\\n </div>\\n</div>\\n') return '' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try: _import_ns =",
"encoding:ascii -*- from mako import runtime, filters, cache UNDEFINED = runtime.UNDEFINED __M_dict_builtin =",
"'__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED)) message",
"'' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg'])",
"UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED)) message = _import_ns.get('message', context.get('message', UNDEFINED)) trans =",
"for name, url in trans.app.tool_shed_registry.tool_sheds.items(): # SOURCE LINE 22 __M_writer(u' <tr class=\"libraryTitle\">\\n <td>\\n",
"name, url in trans.app.tool_shed_registry.tool_sheds.items(): # SOURCE LINE 22 __M_writer(u' <tr class=\"libraryTitle\">\\n <td>\\n <div",
"url in trans.app.tool_shed_registry.tool_sheds.items(): # SOURCE LINE 22 __M_writer(u' <tr class=\"libraryTitle\">\\n <td>\\n <div style=\"float:",
"_import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer() # SOURCE LINE 4",
"cache UNDEFINED = runtime.UNDEFINED __M_dict_builtin = dict __M_locals_builtin = locals _magic_number = 6",
"['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE LINE 36 __M_writer(u' </tr>\\n",
"SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') # SOURCE LINE 28 __M_writer(unicode(h.url_for(",
"<div class=\"toolFormBody\">\\n <div class=\"form-row\">\\n <table class=\"grid\">\\n ') # SOURCE LINE 20 shed_id =",
"__M_writer = context.writer() # SOURCE LINE 4 __M_writer(u'Configured Galaxy tool sheds') return ''",
"UNDEFINED)) message = _import_ns.get('message', context.get('message', UNDEFINED)) trans = _import_ns.get('trans', context.get('trans', UNDEFINED)) render_msg =",
"__M_writer(u'</a>\\n </div>\\n <div popupmenu=\"dataset-') # SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"')",
"controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') # SOURCE",
"LINE 2 __M_writer(u'\\n\\n') # SOURCE LINE 4 __M_writer(u'\\n\\n') # SOURCE LINE 9 __M_writer(u'\\n\\n')",
"SOURCE LINE 8 __M_writer(unicode(h.css( \"library\" ))) __M_writer(u'\\n') return '' finally: context.caller_stack._pop_frame() def render_title(context):",
"class=\"menubutton split popup\" id=\"dataset-') # SOURCE LINE 24 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"view-info\" href=\"')",
"__M_writer(u'\\n\\n') # SOURCE LINE 11 if message: # SOURCE LINE 12 __M_writer(u' ')",
"# SOURCE LINE 6 __M_writer(u'\\n ') # SOURCE LINE 7 __M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n ')",
"__M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n ') # SOURCE LINE 8 __M_writer(unicode(h.css( \"library\" ))) __M_writer(u'\\n') return ''",
"SOURCE LINE 11 if message: # SOURCE LINE 12 __M_writer(u' ') __M_writer(unicode(render_msg( message,",
"= _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 6 __M_writer(u'\\n ')",
"tool_shed_url=url ))) __M_writer(u'\">Search for valid tools</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 30",
"SOURCE LINE 9 __M_writer(u'\\n\\n') # SOURCE LINE 11 if message: # SOURCE LINE",
"'__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) h = _import_ns.get('h', context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer",
"context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) h = _import_ns.get('h', context.get('h', UNDEFINED))",
"SOURCE LINE 2 ns = runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] =",
"__M_writer(u'\\n') return '' finally: context.caller_stack._pop_frame() def render_title(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context,",
"__M_writer(u'\\n') pass # SOURCE LINE 36 __M_writer(u' </tr>\\n </table>\\n </div>\\n <div style=\"clear: both\"></div>\\n",
"SOURCE LINE 24 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"view-info\" href=\"') # SOURCE LINE 25 __M_writer(unicode(h.url_for(",
"'__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer() # SOURCE LINE 4 __M_writer(u'Configured Galaxy tool sheds')",
"for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') # SOURCE LINE 34 shed_id += 1",
"_mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals =",
"def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg'])",
"__M_writer(unicode(h.css( \"library\" ))) __M_writer(u'\\n') return '' finally: context.caller_stack._pop_frame() def render_title(context): context.caller_stack._push_frame() try: _import_ns",
"try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status',",
"status = _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED)) message = _import_ns.get('message',",
"_mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) h = _import_ns.get('h', context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED))",
"in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for name, url in trans.app.tool_shed_registry.tool_sheds.items(): #",
"runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context)",
"= _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h', context.get('h', UNDEFINED)) message = _import_ns.get('message', context.get('message',",
"LINE 14 __M_writer(u'\\n<div class=\"toolForm\">\\n <div class=\"toolFormTitle\">Accessible Galaxy tool sheds</div>\\n <div class=\"toolFormBody\">\\n <div class=\"form-row\">\\n",
"__M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE",
"status ))) __M_writer(u'\\n') pass # SOURCE LINE 14 __M_writer(u'\\n<div class=\"toolForm\">\\n <div class=\"toolFormTitle\">Accessible Galaxy",
"['stylesheets', 'title'] def _mako_get_namespace(context, name): try: return context.namespaces[(__name__, name)] except KeyError: _mako_generate_namespaces(context) return",
"__M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE LINE 36",
"href=\"') # SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for valid",
"_template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns,",
"LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">') __M_writer(unicode(name)) __M_writer(u'</a>\\n </div>\\n <div popupmenu=\"dataset-')",
"class=\"grid\">\\n ') # SOURCE LINE 20 shed_id = 0 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key,",
"# SOURCE LINE 22 __M_writer(u' <tr class=\"libraryTitle\">\\n <td>\\n <div style=\"float: left; margin-left: 1px;\"",
"__M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE",
"context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs)",
"_template_cache=cache.Cache(__name__, _modified_time) _source_encoding='ascii' _exports = ['stylesheets', 'title'] def _mako_get_namespace(context, name): try: return context.namespaces[(__name__,",
"_import_ns.get('render_msg', context.get('render_msg', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 1 __M_writer(u'\\n') # SOURCE",
"dict __M_locals_builtin = locals _magic_number = 6 _modified_time = 1433361565.2110319 _template_filename='templates/webapps/galaxy/admin/tool_sheds.mako' _template_uri='/webapps/galaxy/admin/tool_sheds.mako' _template_cache=cache.Cache(__name__,",
"return context.namespaces[(__name__, name)] except KeyError: _mako_generate_namespaces(context) return context.namespaces[(__name__, name)] def _mako_generate_namespaces(context): # SOURCE",
"28 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') #",
"trans = _import_ns.get('trans', context.get('trans', UNDEFINED)) render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED)) __M_writer = context.writer()",
"callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako',",
"tool_shed_url=url ))) __M_writer(u'\">Search for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') # SOURCE LINE 34",
"# SOURCE LINE 9 __M_writer(u'\\n\\n') # SOURCE LINE 11 if message: # SOURCE",
"= __M_dict_builtin(pageargs=pageargs) _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED))",
"in trans.app.tool_shed_registry.tool_sheds.items(): # SOURCE LINE 22 __M_writer(u' <tr class=\"libraryTitle\">\\n <td>\\n <div style=\"float: left;",
"__M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE",
"= runtime.UNDEFINED __M_dict_builtin = dict __M_locals_builtin = locals _magic_number = 6 _modified_time =",
"context.get('h', UNDEFINED)) message = _import_ns.get('message', context.get('message', UNDEFINED)) trans = _import_ns.get('trans', context.get('trans', UNDEFINED)) render_msg",
"controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for valid tools</a>\\n <a class=\"action-button\" href=\"') # SOURCE",
"_import_ns.get('trans', context.get('trans', UNDEFINED)) render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED)) __M_writer = context.writer() # SOURCE",
"class=\"form-row\">\\n <table class=\"grid\">\\n ') # SOURCE LINE 20 shed_id = 0 __M_locals_builtin_stored =",
"2 ns = runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako', callables=None, calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def",
"</div>\\n <div popupmenu=\"dataset-') # SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') #",
"__M_writer(u'\">Search for workflows</a>\\n </div>\\n </td>\\n </tr>\\n ') # SOURCE LINE 34 shed_id +=",
"_source_encoding='ascii' _exports = ['stylesheets', 'title'] def _mako_get_namespace(context, name): try: return context.namespaces[(__name__, name)] except",
"__M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"view-info\" href=\"') # SOURCE LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url",
"LINE 1 __M_writer(u'\\n') # SOURCE LINE 2 __M_writer(u'\\n\\n') # SOURCE LINE 4 __M_writer(u'\\n\\n')",
"except KeyError: _mako_generate_namespaces(context) return context.namespaces[(__name__, name)] def _mako_generate_namespaces(context): # SOURCE LINE 2 ns",
"= context.writer() # SOURCE LINE 6 __M_writer(u'\\n ') # SOURCE LINE 7 __M_writer(unicode(parent.stylesheets()))",
"</table>\\n </div>\\n <div style=\"clear: both\"></div>\\n </div>\\n</div>\\n') return '' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame()",
"context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE",
"= context.writer() # SOURCE LINE 1 __M_writer(u'\\n') # SOURCE LINE 2 __M_writer(u'\\n\\n') #",
"</div>\\n <div style=\"clear: both\"></div>\\n </div>\\n</div>\\n') return '' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try:",
"<div class=\"form-row\">\\n <table class=\"grid\">\\n ') # SOURCE LINE 20 shed_id = 0 __M_locals_builtin_stored",
"context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) h =",
"action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 29",
"LINE 34 shed_id += 1 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in",
"# SOURCE LINE 20 shed_id = 0 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for",
"# -*- encoding:ascii -*- from mako import runtime, filters, cache UNDEFINED = runtime.UNDEFINED",
"1 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in",
"__M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored]))",
"# SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for valid tools</a>\\n",
"both\"></div>\\n </div>\\n</div>\\n') return '' finally: context.caller_stack._pop_frame() def render_stylesheets(context): context.caller_stack._push_frame() try: _import_ns = {}",
"= {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer() # SOURCE LINE 4 __M_writer(u'Configured",
"# SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') # SOURCE LINE 28",
"__M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for name, url in trans.app.tool_shed_registry.tool_sheds.items(): # SOURCE",
"0 __M_locals_builtin_stored = __M_locals_builtin() __M_locals.update(__M_dict_builtin([(__M_key, __M_locals_builtin_stored[__M_key]) for __M_key in ['shed_id'] if __M_key in",
"))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed',",
"# SOURCE LINE 24 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"view-info\" href=\"') # SOURCE LINE 25",
"style=\"float: left; margin-left: 1px;\" class=\"menubutton split popup\" id=\"dataset-') # SOURCE LINE 24 __M_writer(unicode(shed_id))",
"= 6 _modified_time = 1433361565.2110319 _template_filename='templates/webapps/galaxy/admin/tool_sheds.mako' _template_uri='/webapps/galaxy/admin/tool_sheds.mako' _template_cache=cache.Cache(__name__, _modified_time) _source_encoding='ascii' _exports = ['stylesheets',",
"__M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE LINE 36 __M_writer(u' </tr>\\n </table>\\n </div>\\n <div style=\"clear:",
"context.get('render_msg', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 1 __M_writer(u'\\n') # SOURCE LINE",
"= context.writer() # SOURCE LINE 4 __M_writer(u'Configured Galaxy tool sheds') return '' finally:",
"def render_title(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer()",
"def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals",
"_template_filename='templates/webapps/galaxy/admin/tool_sheds.mako' _template_uri='/webapps/galaxy/admin/tool_sheds.mako' _template_cache=cache.Cache(__name__, _modified_time) _source_encoding='ascii' _exports = ['stylesheets', 'title'] def _mako_get_namespace(context, name): try:",
"= {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) status = _import_ns.get('status', context.get('status', UNDEFINED)) h = _import_ns.get('h',",
"repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url )))",
"UNDEFINED)) parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 6",
"# SOURCE LINE 7 __M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n ') # SOURCE LINE 8 __M_writer(unicode(h.css( \"library\"",
"import runtime, filters, cache UNDEFINED = runtime.UNDEFINED __M_dict_builtin = dict __M_locals_builtin = locals",
"_mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri) def render_body(context,**pageargs): context.caller_stack._push_frame() try: __M_locals = __M_dict_builtin(pageargs=pageargs) _import_ns",
"context.writer() # SOURCE LINE 6 __M_writer(u'\\n ') # SOURCE LINE 7 __M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n",
"parent = _import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 6 __M_writer(u'\\n",
"for __M_key in ['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') pass # SOURCE LINE",
"render_title(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer = context.writer() #",
"locals _magic_number = 6 _modified_time = 1433361565.2110319 _template_filename='templates/webapps/galaxy/admin/tool_sheds.mako' _template_uri='/webapps/galaxy/admin/tool_sheds.mako' _template_cache=cache.Cache(__name__, _modified_time) _source_encoding='ascii' _exports",
"['shed_id'] if __M_key in __M_locals_builtin_stored])) __M_writer(u'\\n') # SOURCE LINE 21 for name, url",
"<div popupmenu=\"dataset-') # SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"action-button\" href=\"') # SOURCE",
"LINE 28 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Browse valid repositories</a>\\n <a class=\"action-button\" href=\"')",
"{} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) h = _import_ns.get('h', context.get('h', UNDEFINED)) parent = _import_ns.get('parent', context.get('parent',",
"__M_writer(u'\\n') # SOURCE LINE 2 __M_writer(u'\\n\\n') # SOURCE LINE 4 __M_writer(u'\\n\\n') # SOURCE",
"</div>\\n </td>\\n </tr>\\n ') # SOURCE LINE 34 shed_id += 1 __M_locals_builtin_stored =",
"context.get('trans', UNDEFINED)) render_msg = _import_ns.get('render_msg', context.get('render_msg', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE",
"SOURCE LINE 6 __M_writer(u'\\n ') # SOURCE LINE 7 __M_writer(unicode(parent.stylesheets())) __M_writer(u'\\n ') #",
"finally: context.caller_stack._pop_frame() def render_title(context): context.caller_stack._push_frame() try: _import_ns = {} _mako_get_namespace(context, '__anon_0x7f903c23edd0')._populate(_import_ns, [u'render_msg']) __M_writer",
"h = _import_ns.get('h', context.get('h', UNDEFINED)) message = _import_ns.get('message', context.get('message', UNDEFINED)) trans = _import_ns.get('trans',",
"href=\"') # SOURCE LINE 30 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for workflows</a>\\n",
"<td>\\n <div style=\"float: left; margin-left: 1px;\" class=\"menubutton split popup\" id=\"dataset-') # SOURCE LINE",
"__M_writer(unicode(render_msg( message, status ))) __M_writer(u'\\n') pass # SOURCE LINE 14 __M_writer(u'\\n<div class=\"toolForm\">\\n <div",
"valid repositories</a>\\n <a class=\"action-button\" href=\"') # SOURCE LINE 29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url",
"_import_ns.get('parent', context.get('parent', UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 6 __M_writer(u'\\n ') #",
"))) __M_writer(u'\">') __M_writer(unicode(name)) __M_writer(u'</a>\\n </div>\\n <div popupmenu=\"dataset-') # SOURCE LINE 27 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n",
"context.namespaces[(__name__, name)] def _mako_generate_namespaces(context): # SOURCE LINE 2 ns = runtime.TemplateNamespace('__anon_0x7f903c23edd0', context._clean_inheritance_tokens(), templateuri=u'/message.mako',",
"__M_writer(u' ') __M_writer(unicode(render_msg( message, status ))) __M_writer(u'\\n') pass # SOURCE LINE 14 __M_writer(u'\\n<div",
"__M_writer = context.writer() # SOURCE LINE 1 __M_writer(u'\\n') # SOURCE LINE 2 __M_writer(u'\\n\\n')",
"<a class=\"action-button\" href=\"') # SOURCE LINE 30 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_workflows_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search",
"UNDEFINED)) __M_writer = context.writer() # SOURCE LINE 6 __M_writer(u'\\n ') # SOURCE LINE",
"runtime.UNDEFINED __M_dict_builtin = dict __M_locals_builtin = locals _magic_number = 6 _modified_time = 1433361565.2110319",
"12 __M_writer(u' ') __M_writer(unicode(render_msg( message, status ))) __M_writer(u'\\n') pass # SOURCE LINE 14",
"# SOURCE LINE 1 __M_writer(u'\\n') # SOURCE LINE 2 __M_writer(u'\\n\\n') # SOURCE LINE",
"calling_uri=_template_uri) context.namespaces[(__name__, '__anon_0x7f903c23edd0')] = ns def _mako_inherit(template, context): _mako_generate_namespaces(context) return runtime._inherit_from(context, u'/base.mako', _template_uri)",
"_mako_generate_namespaces(context) return context.namespaces[(__name__, name)] def _mako_generate_namespaces(context): # SOURCE LINE 2 ns = runtime.TemplateNamespace('__anon_0x7f903c23edd0',",
"24 __M_writer(unicode(shed_id)) __M_writer(u'-popup\">\\n <a class=\"view-info\" href=\"') # SOURCE LINE 25 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='browse_tool_shed',",
"LINE 36 __M_writer(u' </tr>\\n </table>\\n </div>\\n <div style=\"clear: both\"></div>\\n </div>\\n</div>\\n') return '' finally:",
"29 __M_writer(unicode(h.url_for( controller='admin_toolshed', action='find_tools_in_tool_shed', tool_shed_url=url ))) __M_writer(u'\">Search for valid tools</a>\\n <a class=\"action-button\" href=\"')"
] |
[
"feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20)",
"prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\",",
"@pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc =",
"= gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90()",
"} cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set",
"test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog =",
"machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run()",
"def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg)",
"[150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog =",
"= gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240,",
"gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10)",
"end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog",
"prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power):",
"{ \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric",
"# TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line())",
"@pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog",
"gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0,",
"def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset()",
"= gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0,",
"cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0,",
"\"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\")",
"False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed):",
"= grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to",
"test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog",
"time import grbl import pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = {",
"Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc,",
"# Set this to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc",
"60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc)",
"laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc)",
"[10, 50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power,",
"cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0)",
"50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc)",
"gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300])",
"gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255])",
"prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog",
"cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc,",
"75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\")",
"gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc",
"grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset()",
"test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75,",
"def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc):",
"cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog",
"Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180])",
"cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2)",
"@pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255,",
"0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line():",
"255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc):",
"prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc)",
"\"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\")",
"def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False])",
"feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog =",
"Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed):",
"prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def",
"TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def",
"gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200,",
"prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150,",
"prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc,",
"[True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power,",
"request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\")",
"cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset()",
"def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power, power=power, feed=feed) cnc.cmd(\"G91\") prog.run()",
"@pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog =",
"180, 240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run()",
"cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc)",
"prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def",
"def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog",
"machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run()",
"Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO: Get end-stops",
"200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc,",
"prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255])",
"300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def",
"180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power, power=power, feed=feed) cnc.cmd(\"G91\")",
"@pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power, power=power,",
"cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO: Get end-stops installed.",
"import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), }",
"test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power, power=power, feed=feed) cnc.cmd(\"G91\") prog.run() cnc.reset()",
"prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180,",
"cnc.cmd(\"G91\") prog.run() cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset()",
"import time import grbl import pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg =",
"= { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() #",
"request.config.getoption(\"--baudrate\"), } cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") #",
"cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run()",
"200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def",
"print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10,",
"prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150, 200, 255]) def",
"= gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100, 150,",
"installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog =",
"prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50, 75, 100,",
"test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120,",
"cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO:",
"import grbl import pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\":",
"cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30,",
"240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset()",
"prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\",",
"time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. #",
"prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\",",
"prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30,",
"[30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed,",
"120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog = gcode.Line(power=255, feed=feed, machine=cnc) cnc.cmd(\"G91\")",
"cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60, 120, 180, 240, 300]) def test_03_laser_feed(cnc, feed): prog =",
"test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\",",
"@pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200, 255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power,",
"def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=0) prog.run() cnc.reset() @pytest.mark.parametrize(\"laser_power\", [10, 50,",
"pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\": request.config.getoption(\"--baudrate\"),",
"grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0.",
"# Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO: Get",
"= gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\", [150, 200,",
"150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset()",
"def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=10) prog.run() cnc.reset() @pytest.mark.parametrize(\"feed\", [30, 60,",
"to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def",
"100, 150, 200, 255]) def test_01_laser_power(cnc, laser_power): prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run()",
"test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True, False]) @pytest.mark.parametrize(\"power\",",
"cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\")",
"[30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power, power=power, feed=feed)",
"cnc = grbl.Grbl(**grbl_cfg) time.sleep(2) cnc.reset() # Metric cnc.cmd(\"G21\") cnc.cmd(\"G91\") cnc.cmd(\"G0X5Y5F300\") # Set this",
"this to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\")",
"yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc): prog = gcode.GCode(machine=cnc) prog.G90()",
"cnc.reset() def test_04_row3(cnc): prog = gcode.GCode(machine=cnc) prog.G90() prog.G0(X=0, Y=20) prog.run() cnc.reset() @pytest.mark.parametrize(\"dynamic_power\", [True,",
"Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\") cnc.cmd(\"G0X0Y0F300\") def test_default_line(): print(gcode.Line()) def test_00_row1(cnc):",
"255]) @pytest.mark.parametrize(\"feed\", [30, 180]) def test_05_laser_power_feed(cnc, dynamic_power, power, feed): prog = gcode.Line(machine=cnc, dynamic_power=dynamic_power,",
"grbl import pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"),",
"cnc.cmd(\"G0X5Y5F300\") # Set this to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield",
"Set this to 0. # TODO: Get end-stops installed. cnc.cmd(\"G92X0Y0Z0\") yield cnc cnc.cmd(\"G90\")",
"prog = gcode.Line(power=laser_power, machine=cnc) cnc.cmd(\"G91\") prog.run() cnc.reset() def test_02_row2(cnc): prog = gcode.GCode(machine=cnc) prog.G90()",
"import pytest import gcode @pytest.fixture(scope=\"session\") def cnc(request): grbl_cfg = { \"port\": request.config.getoption(\"--port\"), \"baudrate\":"
] |
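The cnc fixture reads --port and --baudrate from the pytest configuration, and pytest has no such options built in, so a conftest.py next to the suite must register them. A minimal sketch, with default values that are illustrative assumptions rather than taken from the project:

# conftest.py -- registers the CLI options consumed by the cnc fixture.
# The default port and baudrate are assumptions for illustration.
def pytest_addoption(parser):
    parser.addoption("--port", action="store", default="/dev/ttyUSB0",
                     help="serial port of the GRBL controller")
    parser.addoption("--baudrate", action="store", default=115200, type=int,
                     help="baudrate of the serial link to GRBL")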
# Regression test: autoeap's extracted lightcurve for EPIC 220198696
# (campaign 8) must match the stored reference file.
import numpy as np
import autoeap
from numpy.testing import assert_array_almost_equal
import os

PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))


def test_raw_lightcurve():
    time, flux, flux_err = autoeap.createlightcurve('EPIC220198696', campaign=8)

    lc = np.genfromtxt(os.path.join(PACKAGEDIR, "EPIC220198696_c8_autoEAP.lc"), skip_header=1).T

    assert_array_almost_equal(time, lc[0])
    assert_array_almost_equal(flux, lc[1].astype(np.float32))
    assert_array_almost_equal(flux_err, lc[2].astype(np.float32))
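The reference file EPIC220198696_c8_autoEAP.lc is read as three whitespace-separated columns after a single header row. Should it ever need regenerating, a sketch under that assumption (the helper name and header text are hypothetical):

# Hypothetical helper to rebuild the reference file in the layout the test
# expects: one header row, then time/flux/flux_err columns.
import os
import numpy as np
import autoeap

def save_reference(epic='EPIC220198696', campaign=8, outdir='.'):
    time, flux, flux_err = autoeap.createlightcurve(epic, campaign=campaign)
    path = os.path.join(outdir, '%s_c%d_autoEAP.lc' % (epic, campaign))
    np.savetxt(path, np.c_[time, flux, flux_err], header='time flux flux_err')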
# Client for an Unchained blockchain indexer: fetches the native ETH balance
# and ERC-20 token balances for an address.
import logging
import os
import urllib3
import ast

from common.utils.requests import http
from common.utils.networks import ETH
from common.services import cointainer_web3 as web3
from common.utils.ethereum import ERC20_ABI

logger = logging.getLogger('watchtower.common.services.unchained')


class UnchainedClient(object):
    def __init__(self, network):
        baseurl = self.get_baseurl(network)
        if baseurl is None:
            raise Exception(
                'UnchainedClient is not supported for network: {}'.format(network)
            )
        self.network = network
        self.baseurl = baseurl

    @staticmethod
    def get_baseurl(network):
        # Map each supported network to its indexer base URL.
        return {
            ETH: os.getenv('UNCHAINED_ETH_URL')
        }.get(network)

    def get_balances(self, address, account_id, supported_tokens=None):
        if not address:
            logger.error("Unable to get %s balances for account: %s. No associated address.", self.network, account_id)
            return dict()
        resp = http.get('{}/api/v2/address/{}?details=tokenBalances'.format(self.baseurl, address)).json_data
        # Token balances are keyed by lowercase contract address; the native
        # ETH balance rides along under the ETH key.
        balances = {token.get('contract').lower(): token.get('balance') for token in resp.get('tokens', list())}
        balances[ETH] = resp.get('balance')
        try:
            # Fall back to an on-chain balanceOf() call when the indexer
            # response does not include a WETH balance.
            weth_contract_address = supported_tokens.get('WETH') if supported_tokens and supported_tokens.get('WETH') else None
            if weth_contract_address:
                if balances.get(weth_contract_address) is None:
                    weth_address = web3.toChecksumAddress(weth_contract_address)
                    if weth_address:
                        contract = web3.eth.contract(address=weth_address, abi=ERC20_ABI)
                        balance = contract.functions.balanceOf(address).call()
                        balances[weth_address.lower()] = balance
        except Exception as e:
            logger.error("Failed to fetch WETH (%s) balance for address: %s", weth_contract_address, address)
            logger.error(e)
        return balances


def get_client(network):
    return UnchainedClient(network)
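A sketch of how the module-level factory might be called; the address, account id and WETH contract mapping below are illustrative placeholders, not values from the source:

# Illustrative usage only: the address and token map are placeholders.
client = get_client(ETH)
balances = client.get_balances(
    address='0x0000000000000000000000000000000000000000',
    account_id=42,
    supported_tokens={'WETH': '0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2'},
)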
# Subclassing list to demonstrate 1-based indexing: external index 0 raises
# IndexError, and external index n maps to internal index n - 1.
class MyList(list):
    def __getitem__(self, index):
        if index == 0:
            raise IndexError
        if index > 0:
            index -= 1
        return list.__getitem__(self, index)

    def __setitem__(self, index, value):
        if index == 0:
            raise IndexError
        if index > 0:
            index -= 1
        list.__setitem__(self, index, value)


if __name__ == '__main__':
    x = MyList(['a', 'b', 'c'])
    print(x)
    print("-" * 10)
    x.append('d')
    print(x)
    print("-" * 10)
    x.__setitem__(4, 'e')
    print(x)
    print("-" * 10)
    print(x[1])
    print(x.__getitem__(1))
    print("-" * 10)
    print(x[4])
    print(x.__getitem__(4))
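Tracing the __main__ block by hand, the demo should print the following. Note that list's own repr bypasses the overridden __getitem__, so it shows the underlying 0-based storage, and x.__setitem__(4, 'e') lands on internal index 3, overwriting 'd':

['a', 'b', 'c']
----------
['a', 'b', 'c', 'd']
----------
['a', 'b', 'c', 'e']
----------
a
a
----------
e
e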
from django.core.management.base import BaseCommand
from rest_framework.authtoken.models import Token


class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument(
            '--force',
            action='store_true',
            help='WARNING - Understand that this logs out and *PERMANENTLY* DELETES THE TOKENS FOR ALL USERS',
        )

    def handle(self, *args, **options):
        if not options["force"]:
            print("Include --force if you understand that this will log out all users.")
        else:
            Token.objects.all().delete()
            print("All auth tokens deleted.")
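Django derives a management command's name from its module, so if this file lived at <app>/management/commands/delete_all_tokens.py (the filename here is an assumption), it would be run as:

python manage.py delete_all_tokens           # prints the warning, deletes nothing
python manage.py delete_all_tokens --force   # irreversibly deletes every token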
from config import Config
from main import determine_colour, return_tile_colour, coordinates_to_notation


def test_determine_colour():
    assert determine_colour(0, 0)
    assert not determine_colour(0, 7)
    assert not determine_colour(7, 0)
    assert determine_colour(7, 7)


def test_return_tile_colour():
    assert return_tile_colour(True) == Config.COLOUR_WHITE
    assert return_tile_colour(False) == Config.COLOUR_BLACK


def test_coordinates_to_notation():
    assert coordinates_to_notation(0, 0) == "A8"
    assert coordinates_to_notation(7, 0) == "H8"
    assert coordinates_to_notation(0, 7) == "A1"
    assert coordinates_to_notation(7, 7) == "H1"
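The assertions pin the coordinate system down: x runs across files A-H, y runs down ranks 8-1, and (0, 0) is a light square. One set of implementations consistent with the tests, offered as a sketch of the behaviour rather than the project's actual main.py:

# Sketch implementations that satisfy the assertions above; assumes Config
# provides COLOUR_WHITE and COLOUR_BLACK, as the test imports suggest.
def determine_colour(x, y):
    # Light square when file + rank offset is even (A8 is light).
    return (x + y) % 2 == 0

def return_tile_colour(is_white):
    return Config.COLOUR_WHITE if is_white else Config.COLOUR_BLACK

def coordinates_to_notation(x, y):
    # Files A-H map from x; ranks count down from 8 as y grows.
    return "%s%d" % (chr(ord('A') + x), 8 - y)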
# Essential modules import
import json
from paho.mqtt.client import *
# Variables modules import: USER, PASSW, IP_BROKER, PORT and SUBSCRIPTIONS
# are expected to come from this star import.
from tools import *
# Importing custom Utility modules
from utility.logger import MyLogger

log = MyLogger("mqtt")  # Logger


class Marker(Client):
    '''
    Client Marker : Broker client to send and/or receive MQTT publications.
    '''

    def __init__(self):
        Client.__init__(self)
        if USER:  # Set authentication, if set
            self.username_pw_set(USER, PASSW)
        self.connect(IP_BROKER, PORT)  # Connecting Client to Broker
        self.obj = []

    def on_connect(self, client, userdata, flags, rc):
        '''
        Runs when the Client successfully connects to the Broker.
        '''
        # Subscribing in on_connect() means that if we lose the connection and
        # reconnect then subscriptions will be renewed.
        for topic in SUBSCRIPTIONS:
            self.subscribe(topic)

    def on_message(self, client, userdata, msg):
        '''
        Client receiving a publication.
        '''
        try:
            topic = msg.topic
            try:
                # Checks if the payload is valid JSON
                j = json.loads(msg.payload.decode("utf-8"))
                log_message = ''
                for key, value in j.items():
                    log_message += "[%s > %s]" % (key, value)
                message = j
                json_check = True
            except Exception:
                # Not JSON: pass the raw payload through unchanged.
                log_message = msg.payload
                message = msg.payload
                json_check = False
            log.info("%s received from %s ~ %s" % (log.timestamp(), topic, log_message))
            if self.obj:  # Check if any object is connected to MQTT
                for _obj in self.obj:
                    # Forward to each attached object's receivedMQTT() hook.
                    _obj.receivedMQTT(topic, message, json=json_check)
        except Exception as e:
            log.warning("Failed to handle incoming publication.")
            log.exception(str(e))

    def send_message(self, topic, message):
        '''
        Publish a message to the destination topic.
        topic : <STRING> Topic where to send.
        message : <STRING> Payload to send.
        '''
        try:
            if type(message) == dict:
                self.publish(topic, json.dumps(message))
            else:
                self.publish(topic, message)
            log.info("Successful")
            # log.info("%s published to %s ~ %s" % (log.timestamp(), topic, message))
        except Exception as e:
            log.warning("Failed to publish message.")
            log.exception(str(e))

    def attach(self, _object):
        '''
        Attach an object to receive MQTT publications through:
        def receivedMQTT(topic, message, json=False)
        json = <BOOL> True when message is a stringified JSON.
        '''
        try:
            self.obj.append(_object)
            log.info("Attached to broker")
        except Exception as e:
            log.exception(str(e))
            log.error("Not attached to broker")
"j.items(): log_message += \"[%s > %s]\" % (key, value) message = j json_check",
"Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def send_message(self, topic, message): ''' Publish to",
"message) log.info(\"Sucseful\") #log.info(\"%s published to %s ~ %s\" % (log.timestamp(), topic, message)) except",
"except Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object): ''' Attach a",
"if USER: # Set authentication, if set self.username_pw_set(USER, PASSW) self.connect(IP_BROKER, PORT) # Connecting",
"'' for key, value in j.items(): log_message += \"[%s > %s]\" % (key,",
"client, userdata, flags, rc): ''' Do something when the Client successfully connect to",
"_object): ''' Attach a * to receive MQTT publication with : def receiveMQTT(topic,",
"log = MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker : Broker client",
"if we lose the connection and # reconnect then subscriptions will be renewed.",
"import MyLogger log = MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker :",
"# Check if any object is connected to MQTT for _obj in self.obj:",
"we lose the connection and # reconnect then subscriptions will be renewed. for",
"_obj.receivedMQTT(topic, message, json=json_check) # receiveMQTT() function to receive MQTT pubs except Exception as",
"''' try: self.obj.append(_object) log.info(\"Attached to broker\") except Exception as e: log.exception(str(e)) log.error(\"Not attached",
"for topic in SUBSCRIPTIONS: self.subscribe(topic) def on_message(self, client, userdata, msg): ''' Client receiving",
"PORT) # Connecting Client to Broker self.obj = [] def on_connect(self, client, userdata,",
"flags, rc): ''' Do something when the Client successfully connect to the Broker.",
"message): ''' Publish to destination topic a message. topic : <STRING> Topic where",
"log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object): ''' Attach a * to receive MQTT",
"= MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker : Broker client to",
"''' # Subscribing in on_connect() means that if we lose the connection and",
"= msg.payload message = msg.payload json_check = False log.info(\"%s received from %s ~",
"message, json=json_check) # receiveMQTT() function to receive MQTT pubs except Exception as e:",
"json=json_check) # receiveMQTT() function to receive MQTT pubs except Exception as e: log.warning(\"Failed",
"and # reconnect then subscriptions will be renewed. for topic in SUBSCRIPTIONS: self.subscribe(topic)",
"topic, log_message)) if self.obj: # Check if any object is connected to MQTT",
"MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker : Broker client to send",
"on_connect(self, client, userdata, flags, rc): ''' Do something when the Client successfully connect",
"topic, message)) except Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object): '''",
"self.publish(topic, message) log.info(\"Sucseful\") #log.info(\"%s published to %s ~ %s\" % (log.timestamp(), topic, message))",
"value in j.items(): log_message += \"[%s > %s]\" % (key, value) message =",
"be renewed. for topic in SUBSCRIPTIONS: self.subscribe(topic) def on_message(self, client, userdata, msg): '''",
"log.exception(str(e)) def attach(self, _object): ''' Attach a * to receive MQTT publication with",
"= '' for key, value in j.items(): log_message += \"[%s > %s]\" %",
"= <BOOL> True when message is a stringified JSON. ''' try: self.obj.append(_object) log.info(\"Attached",
"# Logger class Marker(Client): ''' Client Marker : Broker client to send and/or",
"MQTT publication with : def receiveMQTT(topic, message, json=False):. #CODE json = <BOOL> True",
"message : <STRING> Payload to send. ''' try: if type(message) == dict: self.publish(topic,",
"subscriptions will be renewed. for topic in SUBSCRIPTIONS: self.subscribe(topic) def on_message(self, client, userdata,",
"as e: log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object): ''' Attach a * to",
"with : def receiveMQTT(topic, message, json=False):. #CODE json = <BOOL> True when message",
"for key, value in j.items(): log_message += \"[%s > %s]\" % (key, value)",
"in j.items(): log_message += \"[%s > %s]\" % (key, value) message = j",
"Essential modules import import json from paho.mqtt.client import * # Variables modules import",
"log_message += \"[%s > %s]\" % (key, value) message = j json_check =",
"if payload is a valid JSON j = json.loads(msg.payload.decode(\"utf-8\")); log_message = '' for",
"received from %s ~ %s\" % (log.timestamp(), topic, log_message)) if self.obj: # Check",
"import import json from paho.mqtt.client import * # Variables modules import from tools",
"MQTT for _obj in self.obj: _obj.receivedMQTT(topic, message, json=json_check) # receiveMQTT() function to receive",
"as e: log.warning(\"Failed something..\") log.exception(str(e)) def send_message(self, topic, message): ''' Publish to destination",
"(log.timestamp(), topic, message)) except Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object):",
"something when the Client successfully connect to the Broker. ''' # Subscribing in",
"is a stringified JSON. ''' try: self.obj.append(_object) log.info(\"Attached to broker\") except Exception as",
"j json_check = True except: log_message = msg.payload message = msg.payload json_check =",
"msg.payload json_check = False log.info(\"%s received from %s ~ %s\" % (log.timestamp(), topic,",
"json=False):. #CODE json = <BOOL> True when message is a stringified JSON. '''",
"to send. message : <STRING> Payload to send. ''' try: if type(message) ==",
"in SUBSCRIPTIONS: self.subscribe(topic) def on_message(self, client, userdata, msg): ''' Client receiving a publication.",
"authentication, if set self.username_pw_set(USER, PASSW) self.connect(IP_BROKER, PORT) # Connecting Client to Broker self.obj",
"> %s]\" % (key, value) message = j json_check = True except: log_message",
"valid JSON j = json.loads(msg.payload.decode(\"utf-8\")); log_message = '' for key, value in j.items():",
"= True except: log_message = msg.payload message = msg.payload json_check = False log.info(\"%s",
"MQTT publications. ''' def __init__(self): Client.__init__(self) if USER: # Set authentication, if set",
"try: topic = msg.topic try: # Checks if payload is a valid JSON",
"== dict: self.publish(topic, json.dumps(message)) else: self.publish(topic, message) log.info(\"Sucseful\") #log.info(\"%s published to %s ~",
"Client to Broker self.obj = [] def on_connect(self, client, userdata, flags, rc): '''",
"import from tools import * # Importing custom Utility modules from utility.logger import",
"message. topic : <STRING> Topic where to send. message : <STRING> Payload to",
"receive MQTT publications. ''' def __init__(self): Client.__init__(self) if USER: # Set authentication, if",
"receiving a publication. ''' try: topic = msg.topic try: # Checks if payload",
"False log.info(\"%s received from %s ~ %s\" % (log.timestamp(), topic, log_message)) if self.obj:",
"(log.timestamp(), topic, log_message)) if self.obj: # Check if any object is connected to",
"receiveMQTT() function to receive MQTT pubs except Exception as e: log.warning(\"Failed something..\") log.exception(str(e))",
"and/or receive MQTT publications. ''' def __init__(self): Client.__init__(self) if USER: # Set authentication,",
"from tools import * # Importing custom Utility modules from utility.logger import MyLogger",
"\"[%s > %s]\" % (key, value) message = j json_check = True except:",
"if any object is connected to MQTT for _obj in self.obj: _obj.receivedMQTT(topic, message,",
"if set self.username_pw_set(USER, PASSW) self.connect(IP_BROKER, PORT) # Connecting Client to Broker self.obj =",
"client, userdata, msg): ''' Client receiving a publication. ''' try: topic = msg.topic",
"json_check = True except: log_message = msg.payload message = msg.payload json_check = False",
"log_message = msg.payload message = msg.payload json_check = False log.info(\"%s received from %s",
"Connecting Client to Broker self.obj = [] def on_connect(self, client, userdata, flags, rc):",
"''' Do something when the Client successfully connect to the Broker. ''' #",
"message)) except Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def attach(self, _object): ''' Attach",
"from paho.mqtt.client import * # Variables modules import from tools import * #",
"the connection and # reconnect then subscriptions will be renewed. for topic in",
"to send and/or receive MQTT publications. ''' def __init__(self): Client.__init__(self) if USER: #",
"%s\" % (log.timestamp(), topic, message)) except Exception as e: log.warning(\"Failed something..\") log.exception(str(e)) def",
"Attach a * to receive MQTT publication with : def receiveMQTT(topic, message, json=False):.",
"#log.info(\"%s published to %s ~ %s\" % (log.timestamp(), topic, message)) except Exception as",
": def receiveMQTT(topic, message, json=False):. #CODE json = <BOOL> True when message is",
"send. ''' try: if type(message) == dict: self.publish(topic, json.dumps(message)) else: self.publish(topic, message) log.info(\"Sucseful\")",
"import * # Variables modules import from tools import * # Importing custom",
"= json.loads(msg.payload.decode(\"utf-8\")); log_message = '' for key, value in j.items(): log_message += \"[%s",
"<BOOL> True when message is a stringified JSON. ''' try: self.obj.append(_object) log.info(\"Attached to",
"Topic where to send. message : <STRING> Payload to send. ''' try: if",
"True except: log_message = msg.payload message = msg.payload json_check = False log.info(\"%s received",
"Client Marker : Broker client to send and/or receive MQTT publications. ''' def",
"is a valid JSON j = json.loads(msg.payload.decode(\"utf-8\")); log_message = '' for key, value",
"custom Utility modules from utility.logger import MyLogger log = MyLogger(\"mqtt\") # Logger class",
"log_message)) if self.obj: # Check if any object is connected to MQTT for",
"''' Publish to destination topic a message. topic : <STRING> Topic where to",
"reconnect then subscriptions will be renewed. for topic in SUBSCRIPTIONS: self.subscribe(topic) def on_message(self,",
"utility.logger import MyLogger log = MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker",
"message is a stringified JSON. ''' try: self.obj.append(_object) log.info(\"Attached to broker\") except Exception",
"# Checks if payload is a valid JSON j = json.loads(msg.payload.decode(\"utf-8\")); log_message =",
"message = j json_check = True except: log_message = msg.payload message = msg.payload",
"# Importing custom Utility modules from utility.logger import MyLogger log = MyLogger(\"mqtt\") #",
"MyLogger log = MyLogger(\"mqtt\") # Logger class Marker(Client): ''' Client Marker : Broker",
"on_connect() means that if we lose the connection and # reconnect then subscriptions",
"except: log_message = msg.payload message = msg.payload json_check = False log.info(\"%s received from",
"rc): ''' Do something when the Client successfully connect to the Broker. '''",
"= False log.info(\"%s received from %s ~ %s\" % (log.timestamp(), topic, log_message)) if",
"Payload to send. ''' try: if type(message) == dict: self.publish(topic, json.dumps(message)) else: self.publish(topic,",
"# Variables modules import from tools import * # Importing custom Utility modules",
"userdata, msg): ''' Client receiving a publication. ''' try: topic = msg.topic try:",
"# reconnect then subscriptions will be renewed. for topic in SUBSCRIPTIONS: self.subscribe(topic) def",
"a publication. ''' try: topic = msg.topic try: # Checks if payload is"
] |
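A minimal usage sketch for the Marker client above. It assumes `tools` defines USER, PASSW, IP_BROKER, PORT, and SUBSCRIPTIONS, as the imports suggest; the `Sensor` class and the `demo/topic` topic are hypothetical stand-ins.

    # Hypothetical observer; any object with a receivedMQTT() method can attach.
    class Sensor:
        def receivedMQTT(self, topic, message, json=False):
            # json=True means `message` is already a parsed dict
            print("Sensor got %r on %s (json=%s)" % (message, topic, json))

    if __name__ == "__main__":
        broker = Marker()                    # connects to IP_BROKER:PORT in __init__
        broker.attach(Sensor())              # Sensor now receives every publication
        broker.send_message("demo/topic", {"temp": 21.5})  # dicts are JSON-encoded
        broker.loop_forever()                # paho network loop; drives on_message()

The attach()/receivedMQTT() pair is a simple observer pattern: the broker client stays generic, and any number of application objects can receive the decoded payloads without subclassing Client.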
import os, sys, ConfigParser
sys.path.insert(0, os.path.join(os.getcwd(), "Jinja2-2.3-py2.5.egg"))
sys.path.append(os.path.join(os.getcwd(), "netifaces-0.5-py2.5-linux-i686.egg"))
import jinja2, netifaces

_config = ConfigParser.SafeConfigParser()
_config.read("config.ini")


# iptables forwarding configuration generator
def update(data):
    jinja = jinja2.Environment(loader=jinja2.loaders.FileSystemLoader("template"))
    # Collect the IPv4 address of every local interface; interfaces without
    # an IPv4 address raise KeyError or ValueError and are skipped.
    ip = {}
    for iface in netifaces.interfaces():
        try:
            ip[iface] = netifaces.ifaddresses(iface)[netifaces.AF_INET][0]['addr']
        except KeyError:
            pass
        except ValueError:
            pass
    # Render the firewall script template and write it out.
    d = jinja.get_template("vmfw.sh").render(port=data, ip=ip,
                                             vmip=_config.get("iface", "vmIP"))
    open("sysconf/vmfw.sh", "w").write(d)


def restart():
    os.system("sysconf/vmfw.sh")


if __name__ == "__main__":
    from models import PortForward
    update(PortForward.select())
    restart()
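For context, a self-contained sketch of the render step above, using an inline template instead of the real `template/vmfw.sh`, whose contents are not shown here. The `Row` class and its `extport`/`intport` fields are stand-ins for whatever `PortForward.select()` actually yields; only the fact that `port`, `ip`, and `vmip` are the template variables is taken from the source.

    import jinja2

    # Inline stand-in for template/vmfw.sh; the real template's rules are unknown.
    _TEMPLATE = """#!/bin/sh
    {% for p in port %}
    iptables -t nat -A PREROUTING -p tcp --dport {{ p.extport }} -j DNAT --to-destination {{ vmip }}:{{ p.intport }}
    {% endfor %}"""

    class Row(object):
        def __init__(self, extport, intport):
            self.extport, self.intport = extport, intport

    script = jinja2.Template(_TEMPLATE).render(
        port=[Row(8080, 80), Row(2222, 22)],  # stand-in for PortForward.select()
        ip={"eth0": "10.0.0.1"},              # as built from netifaces above
        vmip="192.168.122.10",                # stand-in for _config.get("iface", "vmIP")
    )
    print(script)

Regenerating the script from a template and re-running it keeps the firewall state declarative: the database rows are the single source of truth, and `update()` plus `restart()` reconcile iptables with them.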
# gesuwen/Algorithms: LeetCode/905 Sort Array By Parity.py
# Array
#
# Given an array A of non-negative integers, return an array consisting of
# all the even elements of A, followed by all the odd elements of A.
#
# You may return any answer array that satisfies this condition.
#
# Example 1:
#
# Input: [3,1,2,4]
# Output: [2,4,3,1]
# The outputs [4,2,3,1], [2,4,1,3], and [4,2,1,3] would also be accepted.
#
# Note:
#
# 1 <= A.length <= 5000
# 0 <= A[i] <= 5000

class Solution:
    def sortArrayByParity(self, A):
        """
        :type A: List[int]
        :rtype: List[int]
        """
        output = []
        for i in A:
            if i % 2 == 0:
                output = [i] + output
            else:
                output += [i]
        return output