# Candies, YORK Peppermint Pattie
19092: [], # Candies, TWIZZLERS NIBS CHERRY BITS
19093: [], # Candies, SYMPHONY Milk Chocolate Bar
19094: [], # Desserts, flan, caramel custard, prepared-from-recipe
19095: ["Vanilla ice cream"], # Ice creams, vanilla
19096: [], # Ice creams, vanilla, light, soft-serve
19097: [], # Sherbet, orange
19098: [], # Candies, 5TH AVENUE Candy Bar
19099: [], # Candies, fondant, prepared-from-recipe
19100: [], # Candies, fudge, chocolate, prepared-from-recipe
19101: [], # Candies, fudge, chocolate, with nuts, prepared-from-recipe
19102: [], # Candies, fudge, peanut butter, prepared-from-recipe
19103: [], # Candies, fudge, vanilla, prepared-from-recipe
19104: [], # Candies, fudge, vanilla with nuts
19105: [], # Candies, NESTLE, GOOBERS Chocolate Covered Peanuts
19106: [], # Candies, gumdrops, starch jelly pieces
19107: [], # Candies, hard
19108: [], # Candies, jellybeans
19109: [], # Candies, KIT KAT Wafer Bar
19110: [], # Candies, KRACKEL Chocolate Bar
19111: [], # Candies, NESTLE, BABY RUTH Bar
19112: [], # Candies, TWIZZLERS Strawberry Twists Candy
19113: [], # Syrups, table blends, pancake, with butter
19114: [], # Ice creams, chocolate, light
19115: [], # Candies, MARS SNACKFOOD US, MARS Almond Bar
19116: [], # Candies, marshmallows
19117: [], # Candies, halavah, plain
19118: [], # Candies, NESTLE, OH HENRY! Bar
19119: [], # Candies, NESTLE, CHUNKY Bar
19120: [], # Candies, milk chocolate
19121: [], # Puddings, banana, dry mix, instant, prepared with 2% milk
19122: [], # Puddings, banana, dry mix, regular, prepared with 2% milk
19123: [], # Puddings, chocolate, dry mix, instant, prepared with 2% milk
19124: [], # Baking chocolate, mexican, squares
19125: [], # Chocolate-flavored hazelnut spread
19126: [], # Candies, milk chocolate coated peanuts
19127: [], # Candies, milk chocolate coated raisins
19128: [], # Syrups, table blends, pancake, reduced-calorie
19129: [], # Syrups, table blends, pancake
19130: [], # Candies, HERSHEY'S POT OF GOLD Almond Bar
19132: [], # Candies, milk chocolate, with almonds
19134: [], # Candies, milk chocolate, with rice cereal
19135: [], # Candies, MARS SNACKFOOD US, MILKY WAY Bar
19136: [], # Candies, HERSHEY'S SKOR Toffee Bar
19137: [], # Toppings, strawberry
19138: [], # Candies, truffles, prepared-from-recipe
19139: [], # Baking chocolate, MARS SNACKFOOD US, M&M's Semisweet Chocolate Mini Baking Bits
19140: [], # Candies, MARS SNACKFOOD US, M&M's Peanut Chocolate Candies
19141: [], # Candies, MARS SNACKFOOD US, M&M's Milk Chocolate Candies
19142: [], # Candies, MOUNDS Candy Bar
19143: [], # Candies, MR. GOODBAR Chocolate Bar
19144: [], # Candies, NESTLE, 100 GRAND Bar
19145: [], # Candies, NESTLE, CRUNCH Bar and Dessert Topping
19146: [], # Baking chocolate, MARS SNACKFOOD US, M&M's Milk Chocolate Mini Baking Bits
19147: [], # Candies, peanut bar
19148: [], # Candies, peanut brittle, prepared-from-recipe
19149: [], # Candies, NESTLE, RAISINETS Chocolate Covered Raisins
19150: [], # Candies, REESE'S Peanut Butter Cups
19151: [], # Candies, REESE'S PIECES Candy
19152: [], # Candies, ROLO Caramels in Milk Chocolate
19153: [], # Candies, NESTLE, AFTER EIGHT Mints
19154: [], # Candies, sesame crunch
19155: [], # Candies, MARS SNACKFOOD US, SNICKERS Bar
19156: [], # Candies, MARS SNACKFOOD US, STARBURST Fruit Chews, Original fruits
19157: [], # Candies, MARS SNACKFOOD US, M&M's MINIs Milk Chocolate Candies
19159: [], # Candies, MARS SNACKFOOD US, 3 MUSKETEERS Bar
19160: [], # Candies, MARS SNACKFOOD US, TWIX Caramel Cookie Bars
19161: [], # Candies, MARS SNACKFOOD US, TWIX Peanut Butter Cookie Bars
19162: [], # Candies, WHATCHAMACALLIT Candy Bar
19163: [], # Chewing gum
19164: [], # Candies, SPECIAL DARK Chocolate Bar
19165: [], # Cocoa, dry powder, unsweetened
19166: [], # Cocoa, dry powder, unsweetened, processed with alkali
19168: [], # Desserts, egg custard, baked, prepared-from-recipe
19169: [], # Egg custards, dry mix
19170: [], # Egg custards, dry mix, prepared with whole milk
19171: [], # Cocoa, dry powder, unsweetened, HERSHEY'S European Style Cocoa
19172: [], # Gelatin desserts, dry mix
19173: [], # Gelatin desserts, dry mix, prepared with water
19175: [], # Gelatin desserts, dry mix, reduced calorie, with aspartame
19176: [], # Gelatin desserts, dry mix, reduced calorie, with aspartame, prepared with water
19177: [], # Gelatins, dry powder, unsweetened
19181: [], # Candies, YORK BITES
19182: [], # Desserts, mousse, chocolate, prepared-from-recipe
19183: [], # Puddings, chocolate, ready-to-eat
19184: [], # Puddings, chocolate, dry mix, instant
19185: [], # Puddings, chocolate, dry mix, instant, prepared with whole milk
19186: [], # Desserts, apple crisp, prepared-from-recipe
19187: [], # Flan, caramel custard, dry mix
19188: [], # Puddings, chocolate, dry mix, regular
19189: [], # Puddings, chocolate, dry mix, regular, prepared with whole milk
19190: [], # Puddings, chocolate, dry mix, regular, prepared with 2% milk
19191: [], # Puddings, coconut cream, dry mix, instant, prepared with 2% milk
19193: [], # Puddings, rice, ready-to-eat
19194: [], # Puddings, rice, dry mix
19195: [], # Puddings, rice, dry mix, prepared with whole milk
19198: [], # Puddings, tapioca, dry mix
19199: [], # Puddings, tapioca, dry mix, prepared with whole milk
19201: [], # Puddings, vanilla, ready-to-eat
19202: [], # Puddings, vanilla, dry mix, instant
19203: [], # Puddings, vanilla, dry mix, instant, prepared with whole milk
19204: [], # Puddings, lemon, dry mix, instant, prepared with 2% milk
19205: [], # Egg custards, dry mix, prepared with 2% milk
19206: [], # Puddings, vanilla, dry mix, regular
19207: [], # Puddings, vanilla, dry mix, regular, prepared with whole milk
19208: [], # Puddings, rice, dry mix, prepared with 2% milk
19209: [], # Puddings, tapioca, dry mix, prepared with 2% milk
19212: [], # Puddings, vanilla, dry mix, regular, prepared with 2% milk
19213: [], # Rennin, chocolate, dry mix, prepared with 2% milk
19214: [], # Rennin, vanilla, dry mix, prepared with 2% milk
19216: [], # Candies, praline, prepared-from-recipe
19217: [], # Frozen novelties, ice type, fruit, no sugar added
19218: [], # Puddings, tapioca, ready-to-eat
19219: [], # Puddings, coconut cream, dry mix, regular, prepared with 2% milk
19220: [], # Desserts, rennin, chocolate, dry mix
19221: [], # Rennin, chocolate, dry mix, prepared with whole milk
19222: [], # Desserts, rennin, vanilla, dry mix
19223: [], # Rennin, vanilla, dry mix, prepared with whole milk
19225: [], # Desserts, rennin, tablets, unsweetened
19226: [], # Frostings, chocolate, creamy, ready-to-eat
19227: [], # Frostings, coconut-nut, ready-to-eat
19228: [], # Frostings, cream cheese-flavor, ready-to-eat
19230: [], # Frostings, vanilla, creamy, ready-to-eat
19231: [], # Flan, caramel custard, dry mix, prepared with 2% milk
19232: [], # Flan, caramel custard, dry mix, prepared with whole milk
19233: [], # Puddings, vanilla, ready-to-eat, fat free
19234: [], # Puddings, tapioca, ready-to-eat, fat free
19235: [], # Puddings, chocolate, ready-to-eat, fat free
19236: [], # Candies, HERSHEY'S MILK CHOCOLATE WITH ALMOND BITES
19238: [], # Candies, REESE'S BITES
19239: [], # Candies, REESE'S NUTRAGEOUS Candy Bar
19240: [], # Frostings, chocolate, creamy, dry mix
19241: [], # Frostings, chocolate, creamy, dry mix, prepared with butter
19243: [], # Candies, HEATH BITES
19244: [], # Frostings, vanilla, creamy, dry mix
19246: [], # Frostings, white, fluffy, dry mix
19247: [], # Frostings, white, fluffy, dry mix, prepared with water
19248: [], # Candies, HERSHEY'S, ALMOND JOY BITES
19249: [], # Candies, HERSHEY, REESESTICKS crispy wafers, peanut butter, milk chocolate
19250: [], # Candies, HERSHEY, KIT KAT BIG KAT Bar
19252: [], # Candies, REESE'S, FAST BREAK, milk chocolate peanut butter and soft nougats
19254: [], # Candies, MARS SNACKFOOD US, DOVE Milk Chocolate
19255: [], # Candies, MARS SNACKFOOD US, DOVE Dark Chocolate
19256: [], # Candies, MARS SNACKFOOD US, MILKY WAY Caramels, milk chocolate covered
19258: [], # Candies, MARS SNACKFOOD US, MILKY WAY Caramels, dark chocolate covered
19260: [], # Ice creams, vanilla, light, no sugar added
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/blueprints/account/{name}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BlueprintResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_publics_blueprint(self, **kwargs):
"""
retrieve public and private (owned) blueprints
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_publics_blueprint(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[BlueprintResponse]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_publics_blueprint_with_http_info(**kwargs)
else:
(data) = self.get_publics_blueprint_with_http_info(**kwargs)
return data
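# Usage sketch (how `api` is constructed is an assumption; only the method
# names below appear in this fragment):
#   blueprints = api.get_publics_blueprint()                 # synchronous
#   thread = api.get_publics_blueprint(callback=pprint)      # asynchronous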
def get_publics_blueprint_with_http_info(self, **kwargs):
"""
retrieve public and private (owned) blueprints
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_publics_blueprint_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: list[BlueprintResponse]
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_publics_blueprint" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/blueprints/account', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[BlueprintResponse]',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_private_blueprint(self, **kwargs):
"""
create blueprint as private resource
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_private_blueprint(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param BlueprintRequest body:
:return: BlueprintResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.post_private_blueprint_with_http_info(**kwargs)
else:
(data) = self.post_private_blueprint_with_http_info(**kwargs)
return data
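# Usage sketch for the POST variant (the BlueprintRequest fields shown are
# hypothetical; the docstring only guarantees the `body` parameter type):
#   request = BlueprintRequest(name='my-blueprint', ambari_blueprint=blueprint_json)
#   created = api.post_private_blueprint(body=request)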
def post_private_blueprint_with_http_info(self, **kwargs):
"""
create blueprint as private resource
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_private_blueprint_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param BlueprintRequest body:
:return: BlueprintResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_private_blueprint" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json'])
# Authentication setting
auth_settings = ['tokenAuth']
return self.api_client.call_api('/v1/blueprints/user', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='BlueprintResponse',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def post_public_blueprint(self, **kwargs):
"""
create blueprint as public resource
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_public_blueprint(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param BlueprintRequest body:
:return: BlueprintResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.post_public_blueprint_with_http_info(**kwargs)
else:
(data) = self.post_public_blueprint_with_http_info(**kwargs)
return data
def post_public_blueprint_with_http_info(self, **kwargs):
"""
create blueprint as public resource
Ambari Blueprints are a declarative definition of a Hadoop cluster. With a Blueprint, you specify a stack, the component layout and the configurations to materialize a Hadoop cluster instance. Hostgroups defined in blueprints can be associated with different templates, so you can spin up a highly available cluster running on different instance types. This gives you the option to group your Hadoop services by resource needs (e.g. high I/O, CPU or memory) and create an infrastructure that best fits your workload.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.post_public_blueprint_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param BlueprintRequest body:
:return: BlueprintResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_public_blueprint" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
import maya.OpenMaya as OpenMaya
fpsDict = {}
_bases = {"game": (15.0, 'kGames'), # 5
"film": (24.0, 'kFilm'), # 6
"pal": (25.0, 'kPALFrame'), # 7
"ntsc": (30.0, 'kNTSCFrame'), # 8
"show": (48.0, 'kShowScan'), # 9
"palf": (50.0, 'kPALField'), # 10
"ntscf": (60.0, 'kNTSCField'), # 11
"2fps": (2.0, 'k2FPS'), # 12
"3fps": (3.0, 'k3FPS'), # 13
"4fps": (4.0, 'k4FPS'), # 14
"5fps": (5.0, 'k5FPS'), # 15
"6fps": (6.0, 'k6FPS'), # 16
"8fps": (8.0, 'k8FPS'), # 17
"10fps": (10.0, 'k10FPS'), # 18
"12fps": (12.0, 'k12FPS'), # 19
"16fps": (16.0, 'k16FPS'), # 20
"20fps": (20.0, 'k20FPS'), # 21
"40fps": (40.0, 'k40FPS'), # 22
"75fps": (75.0, 'k75FPS'), # 23
"80fps": (80.0, 'k80FPS'), # 24
"100fps": (100.0, 'k100FPS'), # 25
"120fps": (120.0, 'k120FPS'), # 26
"125fps": (125.0, 'k125FPS'), # 27
"150fps": (150.0, 'k150FPS'), # 28
"200fps": (200.0, 'k200FPS'), # 29
"240fps": (240.0, 'k240FPS'), # 30
"250fps": (250.0, 'k250FPS'), # 31
"300fps": (300.0, 'k300FPS'), # 32
"375fps": (375.0, 'k375FPS'), # 33
"400fps": (400.0, 'k400FPS'), # 34
"500fps": (500.0, 'k500FPS'), # 35
"600fps": (600.0, 'k600FPS'), # 36
"750fps": (750.0, 'k750FPS'), # 37
"1200fps": (1200.0, 'k1200FPS'), # 38
"1500fps": (1500.0, 'k1500FPS'), # 39
"2000fps": (2000.0, 'k2000FPS'), # 40
"3000fps": (3000.0, 'k3000FPS'), # 41
"6000fps": (6000.0, 'k6000FPS'), # 42
"23.976fps": (24.0 * 1000.0 / 1001.0, 'k23_976FPS'), # 43
"29.97fps": (30.0 * 1000.0 / 1001.0, 'k29_97FPS'), # 44
"29.97df": (30.0 * 1000.0 / 1001.0, 'k29_97DF'), # 45
"47.952fps": (48.0 * 1000.0 / 1001.0, 'k47_952FPS'), # 46
"59.94fps": (60.0 * 1000.0 / 1001.0, 'k59_94FPS'), # 47
"44100fps": (44100.0, 'k44100FPS'), # 48
"48000fps": (48000.0, 'k48000FPS')} # 49
# wrapped in a try block so that when the API bails due to MTime differences
# between Maya versions we're still ok
for fps, data in list(_bases.items()):
try:
api = getattr(OpenMaya.MTime, data[1])
if not truncate_float:
fpsDict[fps] = (data[0], api)
else:
fpsDict[fps] = (round(data[0], 3), api)
except AttributeError:
    log.debug("fps '%s' doesn't exist in this build of the Maya API" % fps)
if not value:
return fpsDict
# value comparison and conversion
if type(value) in [int, float]:
for k, v in list(fpsDict.items()):
if abs(float(v[0]) - float(value)) < 0.001:
if not asAPI:
return k
return v[1]
elif isinstance(value, str):
if not asAPI:
return fpsDict[value][0]
return fpsDict[value][1]
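# Behaviour sketch, assuming the truncated def above is
# framerate_mapping(value=None, asAPI=False, truncate_float=...), as the call
# in getCurrentFPS() suggests:
#   framerate_mapping()                  -> full {name: (fps, MTime constant)} dict
#   framerate_mapping(29.97)             -> '29.97fps' (first key within 0.001 of the value)
#   framerate_mapping('pal')             -> 25.0
#   framerate_mapping('pal', asAPI=True) -> OpenMaya.MTime.kPALFrame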
def getCurrentFPS(return_full_map=False):
'''
returns the current frames per second as a number, rather than a useless string
:param return_full_map: if True we return a dictionary of timeUnit:fps rather than the
current actual fps - useful for debugging
'''
if not return_full_map:
# do we just call the Maya `currentTimeUnitToFPS` ?
data = framerate_mapping()
return data[cmds.currentUnit(q=True, fullName=True, time=True)][0]
else:
# why remap? this is purely for consistency on older calls
data = {}
for k, v in list(framerate_mapping().items()):
data[k] = v[0]
return data
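# e.g. with the scene time unit set to 'film', getCurrentFPS() returns 24.0;
# getCurrentFPS(return_full_map=True) returns {'game': 15.0, 'film': 24.0, ...}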
# -----------------------------------------------------------------------------------------
# MENU SETUPS ---
# -----------------------------------------------------------------------------------------
def menuSetup(parent='MayaWindow'):
# if it already exists, remove it so we can update on the fly by restarting the Red9 pack
if cmds.menu('redNineMenuItemRoot', exists=True):
cmds.deleteUI('redNineMenuItemRoot')
log.info("Rebuilding Existing RedNine Menu")
# parent is an existing window with an existing menuBar?
if cmds.window(parent, exists=True):
if not cmds.window(parent, q=True, menuBar=True):
raise Exception('given parent for Red9 Menu has no menuBarLayout: %s' % parent)
else:
cmds.menu('redNineMenuItemRoot', l="RedNine", p=parent, tearOff=True, allowOptionBoxes=True)
log.info('New Red9 Menu added to current window : %s' % parent)
# parent is a menuBar?
elif cmds.menuBarLayout(parent, exists=True):
cmds.menu('redNineMenuItemRoot', l='RedNine', p=parent, tearOff=True, allowOptionBoxes=True)
log.info('New Red9 Sound Menu added to current windows menuBar : %s' % parent)
# parent is an existing menu?
elif cmds.menu(parent, exists=True):
cmds.menuItem('redNineMenuItemRoot', l='RedNine', sm=True, p=parent)
log.info('new Red9 subMenu added to current Menu : %s' % parent)
else:
raise Exception('given parent for Red9 Menu is invalid %s' % parent)
try:
cmds.menuItem('redNineProRootItem',
l='PRO : PACK', sm=True, p='redNineMenuItemRoot', tearOff=True, i='red9.jpg')
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.red9_details + ' : ProPack', i='info_30.png',
c='Red9.setup.get_pro_pack()', p='redNineProRootItem')
# Holder Menus for Client code
if get_client_modules():
cmds.menuItem(divider=True, p='redNineMenuItemRoot')
for client in get_client_modules():
cmds.menuItem('redNineClient%sItem' % client,
l='CLIENT : %s' % client, sm=True, p='redNineMenuItemRoot', tearOff=True, i='red9.jpg')
cmds.menuItem(divider=True, p='redNineMenuItemRoot')
# Add the main Menu items
cmds.menuItem('redNineAnimItem',
l=LANGUAGE_MAP._MainMenus_.animation_toolkit,
ann=LANGUAGE_MAP._MainMenus_.animation_toolkit_ann,
p='redNineMenuItemRoot', echoCommand=True, i='pose_30.png',
c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.AnimationUI.show()")
cmds.menuItem('redNineSnapItem',
l=LANGUAGE_MAP._MainMenus_.simple_snap,
ann=LANGUAGE_MAP._MainMenus_.simple_snap_ann,
p='redNineMenuItemRoot', echoCommand=True, i='locationon_30.png',
c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.AnimFunctions.snap()")
cmds.menuItem('redNineSearchItem',
l=LANGUAGE_MAP._MainMenus_.searchui,
ann=LANGUAGE_MAP._MainMenus_.searchui_ann,
p='redNineMenuItemRoot', echoCommand=True, i='search_30.png',
c="import Red9.core.Red9_CoreUtils as r9Core;r9Core.FilterNode_UI.show()")
cmds.menuItem('redNineLockChnsItem',
l=LANGUAGE_MAP._MainMenus_.lockchannels,
ann=LANGUAGE_MAP._MainMenus_.lockchannels_ann,
p='redNineMenuItemRoot', echoCommand=True, i='lock_30.png',
c="import Red9.core.Red9_CoreUtils as r9Core;r9Core.LockChannels.UI.show()")
cmds.menuItem('redNineMetaUIItem',
l=LANGUAGE_MAP._MainMenus_.metanodeui,
ann=LANGUAGE_MAP._MainMenus_.metanodeui_ann,
p='redNineMenuItemRoot', echoCommand=True, i='meta_node_30.png',
c="import Red9.core.Red9_Meta as r9Meta;r9Meta.MClassNodeUI.show()")
cmds.menuItem('redNineReporterUIItem',
l=LANGUAGE_MAP._MainMenus_.scene_reviewer,
ann=LANGUAGE_MAP._MainMenus_.scene_reviewer_ann,
p='redNineMenuItemRoot', echoCommand=True, i='hand_with_pen_30.png',
c="import Red9.core.Red9_Tools as r9Tools;r9Tools.SceneReviewerUI.show()")
cmds.menuItem('redNineMoCapItem',
l=LANGUAGE_MAP._MainMenus_.mouse_mocap,
ann=LANGUAGE_MAP._MainMenus_.mouse_mocap_ann,
p='redNineMenuItemRoot', echoCommand=True,
c="import Red9.core.Red9_Tools as r9Tools;r9Tools.RecordAttrs.show()")
cmds.menuItem('redNineRandomizerItem',
l=LANGUAGE_MAP._MainMenus_.randomize_keyframes,
ann=LANGUAGE_MAP._MainMenus_.randomize_keyframes_ann,
p='redNineMenuItemRoot', echoCommand=True,
c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.RandomizeKeys.showOptions()")
cmds.menuItem('redNineFilterCurvesItem',
l=LANGUAGE_MAP._MainMenus_.interactive_curve_filter,
ann=LANGUAGE_MAP._MainMenus_.interactive_curve_filter_ann,
p='redNineMenuItemRoot', echoCommand=True,
c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.FilterCurves.show()")
cmds.menuItem('redNineMirrorUIItem',
l=LANGUAGE_MAP._MainMenus_.mirror_setup,
ann=LANGUAGE_MAP._MainMenus_.mirror_setup_ann,
p='redNineMenuItemRoot', echoCommand=True, i='mirror_30.png',
c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.MirrorSetup().show()")
cmds.menuItem('redNineCameraTrackItem', i='camera_30.png',
l='CameraTracker', sm=True, p='redNineMenuItemRoot')
cmds.menuItem('redNineCamerTrackFixedItem',
l=LANGUAGE_MAP._MainMenus_.camera_tracker_pan,
ann=LANGUAGE_MAP._MainMenus_.camera_tracker_pan_ann,
p='redNineCameraTrackItem', echoCommand=True,
c="from Red9.core.Red9_AnimationUtils import CameraTracker as camTrack;camTrack.cameraTrackView(fixed=True)")
if mayaVersion() > 2009:
cmds.menuItem(optionBox=True,
ann=LANGUAGE_MAP._MainMenus_.tracker_tighness_ann,
p='redNineCameraTrackItem', echoCommand=True,
c="from Red9.core.Red9_AnimationUtils import CameraTracker as camTrack;camTrack(fixed=True)._showUI()")
cmds.menuItem('redNineCamerTrackFreeItem',
l=LANGUAGE_MAP._MainMenus_.camera_tracker_track,
ann=LANGUAGE_MAP._MainMenus_.camera_tracker_track_ann,
p='redNineCameraTrackItem', echoCommand=True,
c="from Red9.core.Red9_AnimationUtils import CameraTracker as camTrack;camTrack.cameraTrackView(fixed=False)")
if mayaVersion() > 2009:
cmds.menuItem(optionBox=True,
ann=LANGUAGE_MAP._MainMenus_.tracker_tighness_ann,
p='redNineCameraTrackItem', echoCommand=True,
c="from Red9.core.Red9_AnimationUtils import CameraTracker as camTrack;camTrack(fixed=False)._showUI()")
#cmds.menuItem('redNineSoundSubItem', l='Red9 Sound', sm=True, p='redNineMenuItemRoot', tearOff=True, i='red9.jpg')
addAudioMenu(parent='redNineMenuItemRoot', rootMenu='redNineSoundSubItem')
cmds.menuItem(divider=True, p='redNineMenuItemRoot')
cmds.menuItem('redNineAnimBndItem',
l=LANGUAGE_MAP._MainMenus_.animation_binder,
ann=LANGUAGE_MAP._MainMenus_.animation_binder_ann,
p='redNineMenuItemRoot', echoCommand=True, i='workflow_30.png',
c="import Red9.core.AnimationBinder as animBnd;animBnd.AnimBinderUI().Show()")
cmds.menuItem(divider=True, p='redNineMenuItemRoot')
cmds.menuItem('redNineHomepageItem',
l=LANGUAGE_MAP._MainMenus_.red9_homepage,
ann=LANGUAGE_MAP._MainMenus_.red9_homepage_ann,
p='redNineMenuItemRoot', echoCommand=True, i='domain_30.png',
c="Red9.setup.red9_website_home()")
cmds.menuItem('redNineBlogItem',
l=LANGUAGE_MAP._MainMenus_.red9_blog,
ann=LANGUAGE_MAP._MainMenus_.red9_blog_ann,
p='redNineMenuItemRoot', echoCommand=True, i='blogspot_30.png',
c="Red9.setup.red9_blog()")
cmds.menuItem('redNineVimeoItem',
l=LANGUAGE_MAP._MainMenus_.red9_vimeo,
ann=LANGUAGE_MAP._MainMenus_.red9_vimeo_ann,
p='redNineMenuItemRoot', echoCommand=True, i='vimeo_30.png',
c="Red9.setup.red9_vimeo()")
cmds.menuItem('redNineFacebookItem',
l=LANGUAGE_MAP._MainMenus_.red9_facebook,
ann=LANGUAGE_MAP._MainMenus_.red9_facebook_ann,
p='redNineMenuItemRoot', echoCommand=True, i='facebook_30.png',
c="Red9.setup.red9_facebook()")
cmds.menuItem('redNineTwitterItem',
l=LANGUAGE_MAP._MainMenus_.red9_twitter,
ann=LANGUAGE_MAP._MainMenus_.red9_twitter_ann,
p='redNineMenuItemRoot', echoCommand=True, i='twitter_30.png',
c="Red9.setup.red9_twitter()")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.red9_details, i='info_30.png',
c='Red9.setup.red9ContactInfo()', p='redNineMenuItemRoot')
cmds.menuItem(divider=True, p='redNineMenuItemRoot')
cmds.menuItem('redNineAPIDocItem',
l=LANGUAGE_MAP._MainMenus_.red9_api_docs,
ann=LANGUAGE_MAP._MainMenus_.red9_api_docs_ann,
p='redNineMenuItemRoot', echoCommand=True, i='api_30.png',
c="Red9.setup.red9_apidocs()")
cmds.menuItem('redNineDebuggerItem', l=LANGUAGE_MAP._MainMenus_.red9_debugger, sm=True, i='bug_30.png', p='redNineMenuItemRoot')
cmds.menuItem('redNineLostAnimItem', p='redNineDebuggerItem',
l=LANGUAGE_MAP._MainMenus_.reconnect_anim,
ann=LANGUAGE_MAP._MainMenus_.reconnect_anim_ann,
echoCommand=True, c="import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.ReconnectAnimData().show()")
cmds.menuItem('redNineOpenCrashItem', p='redNineDebuggerItem',
l=LANGUAGE_MAP._MainMenus_.open_last_crash,
ann=LANGUAGE_MAP._MainMenus_.open_last_crash_ann,
echoCommand=True, c="import Red9.core.Red9_General as r9General;r9General.os_openCrashFile()")
cmds.menuItem(divider=True, p='redNineDebuggerItem')
cmds.menuItem('redNineDebugItem',
l=LANGUAGE_MAP._MainMenus_.systems_debug,
ann=LANGUAGE_MAP._MainMenus_.systems_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug()")
cmds.menuItem('redNineInfoItem',
l=LANGUAGE_MAP._MainMenus_.systems_info,
ann=LANGUAGE_MAP._MainMenus_.systems_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info()")
cmds.menuItem(divider=True, p='redNineDebuggerItem')
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.individual_debug, sm=True, p='redNineDebuggerItem')
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Core",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Core')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Meta",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Meta')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Anim",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Anim')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Tools",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Tools')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Pose",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Pose')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9General",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9General')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.debug + " : r9Audio",
ann=LANGUAGE_MAP._MainMenus_.individual_debug_ann,
echoCommand=True, c="Red9.core._setlogginglevel_debug('r9Audio')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.individual_info, sm=True, p='redNineDebuggerItem')
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Core",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Core')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Meta",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Meta')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Anim",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Anim')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Tools",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Tools')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Pose",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Pose')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9General",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9General')")
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.info + " : r9Audio",
ann=LANGUAGE_MAP._MainMenus_.individual_info_ann,
echoCommand=True, c="Red9.core._setlogginglevel_info('r9Audio')")
cmds.menuItem(divider=True, p='redNineDebuggerItem')
cmds.menuItem('redNineReloadItem', l=LANGUAGE_MAP._MainMenus_.systems_reload, p='redNineDebuggerItem',
ann=LANGUAGE_MAP._MainMenus_.systems_reload_ann,
echoCommand=True, c=reload_Red9)
cmds.menuItem(divider=True, p='redNineDebuggerItem')
for language in get_language_maps():
cmds.menuItem(l=LANGUAGE_MAP._MainMenus_.language + " : %s" % language, c=partial(set_language, language), p='redNineDebuggerItem')
except:
raise Exception('Unable to parent Red9 Menu to given parent %s' % parent)
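# Usage sketch, following the parent-resolution branches above:
#   menuSetup()                        # RedNine menu on Maya's main window
#   menuSetup(parent='myMenuBar')      # attached to an existing menuBarLayout
#   menuSetup(parent='myToolsMenu')    # nested as a subMenu of an existing menu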
def addToMayaMenus():
'''
Red9 additions to Maya's own menus: the timeSlider, fileMenu, etc.
'''
try:
# fileMenu additions
if not cmds.menuItem('redNineOpenFolderItem', q=True, ex=True):
mainFileMenu = mel.eval("string $f=$gMainFileMenu")
if not cmds.menu(mainFileMenu, q=True, ni=True):
mel.eval('buildFileMenu()')
cmds.menuItem(divider=True, p=mainFileMenu)
cmds.menuItem('redNineCopyPathItem',
l=LANGUAGE_MAP._MainMenus_.copy_to_clipboard,
ann=LANGUAGE_MAP._MainMenus_.copy_to_clipboard_ann,
p=mainFileMenu,
echoCommand=True,
c="import Red9.core.Red9_General as r9General;import pyperclip;pyperclip.copy(r9General.sceneName())")
# c="import maya.cmds as cmds;import Red9.core.Red9_General as r9General;r9General.Clipboard.setText(cmds.file(q=True,sn=True))")
cmds.menuItem('redNineOpenFolderItem',
l=LANGUAGE_MAP._MainMenus_.open_in_explorer,
ann=LANGUAGE_MAP._MainMenus_.open_in_explorer_ann,
p=mainFileMenu,
echoCommand=True,
c="import maya.cmds as cmds;import Red9.core.Red9_General as r9General;r9General.os_OpenFileDirectory(cmds.file(q=True,sn=True))")
if has_pro_pack():
cmds.menuItem(divider=True, p=mainFileMenu)
cmds.menuItem('redNineSaver9AnimItem',
l=LANGUAGE_MAP._MainMenus_.save_r9anim,
ann=LANGUAGE_MAP._MainMenus_.save_r9anim_ann,
p=mainFileMenu,
echoCommand=True,
c="from Red9.pro_pack import Pro_MenuStubs;Pro_MenuStubs('r9anim_save_complete')")
cmds.menuItem('redNineOpenr9AnimItem',
l=LANGUAGE_MAP._MainMenus_.open_r9anim,
ann=LANGUAGE_MAP._MainMenus_.open_r9anim_ann,
p=mainFileMenu,
echoCommand=True,
c="from Red9.pro_pack import Pro_MenuStubs;Pro_MenuStubs('r9anim_open_direct')")
cmds.menuItem('redNineImportr9AnimItem',
l=LANGUAGE_MAP._MainMenus_.import_r9anim,
ann=LANGUAGE_MAP._MainMenus_.import_r9anim_ann,
p=mainFileMenu,
echoCommand=True,
c="from Red9.pro_pack import Pro_MenuStubs;Pro_MenuStubs('r9anim_import_direct')")
# timeSlider additions
if not cmds.menuItem('redNineTimeSliderCollapseItem', q=True, ex=True):
if mayaVersion() >= 2011:
mel.eval('updateTimeSliderMenu TimeSliderMenu')
TimeSliderMenu = 'TimeSliderMenu'
cmds.menuItem(divider=True, p=TimeSliderMenu)
cmds.menuItem(subMenu=True, label=LANGUAGE_MAP._MainMenus_.range_submenu, p=TimeSliderMenu)
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.selectkeys_timerange,
ann=LANGUAGE_MAP._MainMenus_.selectkeys_timerange_ann,
c='import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.selectKeysByRange()')
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.setrangetoo,
ann=LANGUAGE_MAP._MainMenus_.setrangetoo_ann,
c='import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.setTimeRangeToo(bounds_only=True)')
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.setrangetoo_internal,
ann=LANGUAGE_MAP._MainMenus_.setrangetoo_internal_ann,
c='import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.setTimeRangeToo(bounds_only=False)')
cmds.menuItem(divider=True, p=TimeSliderMenu)
cmds.menuItem('redNineTimeSliderCollapseItem', label=LANGUAGE_MAP._MainMenus_.collapse_time,
ann=LANGUAGE_MAP._MainMenus_.collapse_time_ann,
c='import Red9.core.Red9_CoreUtils as r9Core;r9Core.timeOffset_collapseUI()',
p=TimeSliderMenu)
cmds.menuItem(subMenu=True, label=LANGUAGE_MAP._MainMenus_.insert_padding, p=TimeSliderMenu)
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.pad_selected,
ann=LANGUAGE_MAP._MainMenus_.pad_selected_ann,
c='import Red9.core.Red9_CoreUtils as r9Core;r9Core.timeOffset_addPadding(scene=False)')
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.pad_full_scene,
ann=LANGUAGE_MAP._MainMenus_.pad_full_scene_ann,
c='import Red9.core.Red9_CoreUtils as r9Core;r9Core.timeOffset_addPadding(scene=True)')
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.pad_mrigs,
ann=LANGUAGE_MAP._MainMenus_.pad_mrigs_ann,
c='import Red9.core.Red9_CoreUtils as r9Core;r9Core.timeOffset_addPadding(scene=False,mRigs=True)')
cmds.menuItem(subMenu=True, label=LANGUAGE_MAP._MainMenus_.inverse_anim, p=TimeSliderMenu)
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.inverse_selected,
ann=LANGUAGE_MAP._MainMenus_.inverse_selected_ann,
c='import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.AnimFunctions.inverseAnimCurves(mode="object", mRigs=False)')
cmds.menuItem(label=LANGUAGE_MAP._MainMenus_.inverse_mrigs,
ann=LANGUAGE_MAP._MainMenus_.inverse_mrigs_ann,
c='import Red9.core.Red9_AnimationUtils as r9Anim;r9Anim.AnimFunctions.inverseAnimCurves(mode="object", mRigs=True)')
if has_pro_pack():
cmds.menuItem(divider=True, p=TimeSliderMenu)
cmds.menuItem(subMenu=True, label='Red9 PRO: Timecode', p=TimeSliderMenu, i='red9.jpg')
cmds.menuItem(label='PRO: Toggle Timecode', i='red9.jpg',
ann='Toggle the timeline to view time in timecode or frame',
c="from Red9.pro_pack import r9pro;r9pro.r9import('r9paudio');import r9paudio;r9paudio.timecode_maya_toggle_timeline()")
cmds.menuItem(divider=True)
cmds.menuItem(label='PRO: Set Maya Production Timecode', i='red9.jpg',
ann='set the
# print python list to D list
print " te"+str(teCounter)+".pathRight = [\"b.subject\"];"
print " te"+str(teCounter)+".checkedString = \"" + comparedCompoundType + "\";"
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
if isinstance(premiseB, tuple) and not isPlaceholder(premiseB[2]):
comparedCompoundType = premiseB[2][0]
print " shared TrieElement te"+str(teCounter)+" = new shared TrieElement(TrieElement.EnumType.WALKCHECKCOMPOUND);"
print " te"+str(teCounter)+".pathLeft = [];" # print python list to D list
print " te"+str(teCounter)+".pathRight = [\"b.predicate\"];"
print " te"+str(teCounter)+".checkedString = \"" + comparedCompoundType + "\";"
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
for iSamePremiseTerms in samePremiseTerms: # need to iterate because there can be multiple terms which have to be the same
print " shared TrieElement te"+str(teCounter)+" = new shared TrieElement(TrieElement.EnumType.WALKCOMPARE);"
print " te"+str(teCounter)+".pathLeft = "+ convertPathToDSrc( iSamePremiseTerms[0] ) +";" # print python list to D list
print " te"+str(teCounter)+".pathRight = "+ convertPathToDSrc( iSamePremiseTerms[1] ) +";" # print python list to D list
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
hasIntervalT = "t" in pathsPremiseA or "t" in pathsPremiseB
hasIntervalZ = "z" in pathsPremiseA or "z" in pathsPremiseB
if hasIntervalT: # do we need to emit code for the computation of the interval(s)?
print " shared TrieElement te"+str(teCounter)+" = new shared TrieElement(TrieElement.EnumType.LOADINTERVAL);"
print " te"+str(teCounter)+".stringPayload = \"premiseT\";"
print " te"+str(teCounter)+".path = "+convertPathToDSrc(retPathOfName("t"))+";"
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
if hasIntervalZ: # do we need to emit code for the computation of the interval(s)?
print " shared TrieElement te"+str(teCounter)+" = new shared TrieElement(TrieElement.EnumType.LOADINTERVAL);"
print " te"+str(teCounter)+".stringPayload = \"premiseZ\";"
print " te"+str(teCounter)+".path = "+convertPathToDSrc(retPathOfName("z"))+";"
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
if intervalProjection != "": # do we need to emit code for the computation of the interval(s)?
if intervalProjection == "IntervalProjection(t,z)":
print " shared TrieElement te"+str(teCounter)+" = new shared TrieElement(TrieElement.EnumType.INTERVALPROJECTION);"
print " te"+str(teCounter)+".stringPayload = \""+intervalProjection+"\";"
print " te"+str(teCounter-1)+".children ~= te"+str(teCounter)+";"
print " "
teCounter+=1
else:
raise Exception("Unknown type of interval projection (not implemented)!")
print " shared TrieElement teX = new shared TrieElement(TrieElement.EnumType.EXEC);"
print " teX.fp = &derive"+str(staticFunctionCounter)+";"
print " te"+str(teCounter-1)+".children ~= teX;"
print " "
print " addToTrieRec(&rootTries, te0); //rootTries ~= te0;"
print "}"
print "\n"
teCounter+=1
derivationFunctionsSrc+= "static void derive"+str(staticFunctionCounter)+"(shared Sentence aSentence, shared Sentence bSentence, Sentences resultSentences, shared TrieElement trieElement, TrieContext *trieCtx) {\n"
derivationFunctionsSrc+= " assert(!(aSentence.isQuestion() && bSentence.isQuestion()), \"Invalid derivation : question-question\");\n"
derivationFunctionsSrc+= " \n"
derivationFunctionsSrc+= " \n"
derivationFunctionsSrc+= " bool hasConclusionTruth = !(aSentence.isQuestion() || bSentence.isQuestion());\n"
derivationFunctionsSrc+= " \n"
derivationFunctionsSrc+= " char derivationPunctation = aSentence.punctation;\n"
derivationFunctionsSrc+= " if (aSentence.isQuestion() || bSentence.isQuestion()) {\n"
derivationFunctionsSrc+= " derivationPunctation = '?';\n"
derivationFunctionsSrc+= " }\n"
derivationFunctionsSrc+= " \n"
derivationFunctionsSrc+= " auto a = aSentence.term;\n"
derivationFunctionsSrc+= " auto b = bSentence.term;\n"
derivationFunctionsSrc+= " \n"
derivationFunctionsSrc+= " auto conclusionSubj = "+conclusionSubjCode+";\n"
derivationFunctionsSrc+= " auto conclusionPred = "+conclusionPredCode+";\n"
# TODO< allow it when the conclusion copula is not a real copula >
derivationFunctionsSrc+= " if(!isSameRec(conclusionSubj, conclusionPred)) { // conclusion with same subject and predicate are forbidden by NAL\n"
derivationFunctionsSrc+= " shared Binary conclusionTerm = new shared Binary(\""+escape(conclusionCopula)+"\", conclusionSubj, conclusionPred);\n"
derivationFunctionsSrc+= " auto stamp = Stamp.merge(aSentence.stamp, bSentence.stamp);\n"
derivationFunctionsSrc+= " auto tv = hasConclusionTruth ? TruthValue.calc(\""+truth+"\", aSentence.truth, bSentence.truth) : null;\n"
if intervalProjection == "IntervalProjection(t,z)": # do we need to manipulate the tv for projection?
derivationFunctionsSrc+= " tv = new shared TruthValue(tv.freq, tv.conf * trieCtx.projectedTruthConfidence); // multiply confidence with confidence of projection\n"
derivationFunctionsSrc+= " if(hasConclusionTruth && tv.conf < 0.0001) {\n"
derivationFunctionsSrc+= " return; // conclusions with such a low conf are not relevant to the system\n"
derivationFunctionsSrc+= " }\n"
derivationFunctionsSrc+= " resultSentences.arr ~= new shared Sentence(derivationPunctation, conclusionTerm, tv, stamp);\n"
derivationFunctionsSrc+= " }\n"
derivationFunctionsSrc+= "}\n"
derivationFunctionsSrc+= "\n"
derivationFunctionsSrc+= "\n"
staticFunctionCounter+=1
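# For orientation, the prints above assemble D source of roughly this shape
# (the teN numbering, paths and derive index vary per rule):
#   shared TrieElement te1 = new shared TrieElement(TrieElement.EnumType.WALKCOMPARE);
#   te1.pathLeft = ["a.subject"];
#   te1.pathRight = ["b.subject"];
#   te0.children ~= te1;
#   shared TrieElement teX = new shared TrieElement(TrieElement.EnumType.EXEC);
#   teX.fp = &derive0;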
# generate code for the rule
def gen(premiseA, premiseB, preconditions, conclusion, truthTuple, desire):
# helper to convert a premise from the temporal form to something which we can generate the code for
# ex: "A =/>(t) B" to "(&/, A, t) =/> B"
def convTerm2(term):
if isinstance(term, tuple):
if len(term) == 3:
(a, b, c) = term
if isPlaceholder(a): # normal handling for statement
(name0, copula, name1) = term # structure of conclusion term is encoded as tuple
if isinstance(copula, CWT):
# we have to rebuild the statement
return (("&/", name0, copula.tname), copula.copula, name1)
else:
return term # no special handling necessary because it is not a CWT
else: # special handling for compound
return term # because we only care about statements
else:
raise Exception("unhandled case") # we expect a tuple of length 3
else:
return term # no special treatment necessary
# unpack truthTuple into truth and intervalProjection
(truth, intervalProjection) = truthTuple
# TODO< print desire >
print "// rule "+convTermToStr(premiseA)+", "+convTermToStr(premiseB)+" " +str(preconditions)+ " |- " + convTermToStr(conclusion) + "\t\t(Truth:"+truth+intervalProjection+")"
genEmit(convTerm2(premiseA), convTerm2(premiseB), preconditions, convTerm2(conclusion), truthTuple, desire)
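# Worked example for convTerm2, assuming isPlaceholder('A') is true for plain
# variable names (as the handling above implies):
#   convTerm2(("A", CWT("=/>", "t"), "B")) -> (("&/", "A", "t"), "=/>", "B")
#     i.e. "A =/>(t) B" is rebuilt as "(&/, A, t) =/> B"
#   convTerm2(("A", "-->", "B"))           -> ("A", "-->", "B")   # non-CWT copula: unchanged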
# each copula-type of form [AsymCop,SymCop,[ConjunctiveCops,DisjunctiveCop,MinusCops]]
CopulaTypes = [
["-->","<->",[["&"],"|",["-","~"]]],
["==>","<=>",[["&&"],"||",None]], #
[CWT("=/>","t"),CWT("</>","t"),[[CWT("&/","t"),"&|"],"||",None]], ##
["=|>","<|>",[["&/","&|"],"||",None]], #
#[CWT("=\>","t"),None ,[["&/","&|"],"||",None]] ###
]
# generate code for already implemented conversions?
genCodeComplex = False
print "// AUTOGEN: initializes and fills tries"
print "shared(TrieElement)[] initTrie() {"
print " shared(TrieElement)[] rootTries;"
for [copAsym,copSym,[ConjCops,DisjCop,MinusCops]] in CopulaTypes:
isTemporal = \
"|" in str(copAsym) or \
"/" in str(copAsym) or \
"\\" in str(copAsym)
bFOL = copAsym == "-->"
OmitForHOL = lambda s: s if bFOL else ""
# replace the potentially existing interval with a different name
def ival(obj,tname):
if isinstance(obj, CWT):
return obj.retWithReplacedTName(tname)
return obj.replace("t",tname)
copAsymZ = ival(copAsym, "z")
# TODO< implement inference generation function to generate code which accepts only one argument >
#print "(A "+copAsym+" B)\t\t\t\t\t|-\t(B "+ival(copAsym,"-t")+" A)\t\t(Truth:Conversion)"
if True:
#print "(A "+copAsym+" B),\t(B "+copAsymZ+" C)\t\t\t|-\t(A "+ival(copAsym,"t+z")+" C)\t\t(Truth:deduction"+OmitForHOL(", Desire:Strong")+")"
gen(("A",copAsym,"B"), ("B",copAsymZ,"C"), [] ,("A",ival(copAsym,"t+z"),"C"), ("deduction", ""), OmitForHOL("strong"))
copAsymHasTimeOffset = "/" in str(copAsym) or "\\" in str(copAsym)
IntervalProjection = "IntervalProjection(t,z)" if copAsymHasTimeOffset else ""
if True: # block
gen(("A", copAsym, "B"), ("C", copAsymZ, "B"), [], ("A", ival(copAsym, "t-z"), "C"), ("induction", IntervalProjection), OmitForHOL("weak"))
if True:
gen(("A", copAsym, "B"), ("A", copAsymZ, "C"), [], ("B", ival(copAsym, "t-z"), "C"), ("abduction", IntervalProjection), OmitForHOL("strong"))
if True: # added comparison
gen(("A", copAsym, "B"), ("C", copAsymZ, "B"), [],("A",ival(copSym, "t-z"),"C"), ("comparison", IntervalProjection), OmitForHOL("weak"))
gen(("A", copAsym, "B"), ("A", copAsymZ, "C"), [],("C",ival(copSym, "t-z"),"B"), ("comparison", IntervalProjection), OmitForHOL("weak"))
if copSym != None:
copSymZ = ival(copSym,"z")
if True:
#print "(A "+copSym+" B),\t(B "+copSymZ+" C)\t\t\t|-\t(A "+ival(copSym,"t+z")+" C)\t\t(Truth:resemblance"+OmitForHOL(", Desire:Strong")+")"
gen(("A",copSym,"B"),("B",copSymZ,"C"), [], ("A",ival(copSym,"t+z"),"C"), ("resemblance", ""), OmitForHOL("strong"))
#print "(A "+copAsym+" B),\t(C "+copSymZ+" B)\t\t\t|-\t(A "+copAsym+" C)\t\t(Truth:analogy"+IntervalProjection+OmitForHOL(", Desire:Strong")+")"
gen(("A",copAsym,"B"),("C",copSymZ,"B"), [], ("A",copAsym,"C"), ("analogy", IntervalProjection), OmitForHOL("strong"))
#print "(A "+copAsym+" B),\t(C "+copSymZ+" A)\t\t\t|-\t(C "+ival(copSym,"t+z")+" B)\t\t(Truth:analogy"+OmitForHOL(", Desire:Strong")+")"
gen(("A",copAsym,"B"),("C",copSymZ,"A"), [], ("C",ival(copSym,"t+z"),"B"), ("analogy", ""), OmitForHOL("strong"))
#print "(A "+copAsym+" B),\t(C "+copSymZ+" B)\t\t\t|-\t(A "+copSym+" C)\t\t(Truth:comparison"+IntervalProjection+OmitForHOL(", Desire:Weak")+")"
gen(("A", copAsym, "B"), ("C", copSymZ, "B"), [],("A",copSym,"C"), ("comparison", IntervalProjection), OmitForHOL("weak"))
#print "(A "+copAsym+" B),\t(A "+copSymZ+" C)\t\t\t|-\t(C "+copSym+" B)\t\t(Truth:comparison"+IntervalProjection+OmitForHOL(", Desire:Weak")+")"
gen(("A", copAsym, "B"), ("A",copSymZ,"C"), [],("C",copSym,"B"), ("comparison", IntervalProjection), OmitForHOL("weak"))
if isTemporal:
isBackward = copSym == None
for ConjCop in ConjCops:
predRel = ["Time:After(tB,tA)"] if copAsymHasTimeOffset else (["Time:Parallel(tB,tA)"] if "|" in str(copAsym) else [])
predConj = ["Time:After(tB,tA)"] if "/" in str(ConjCop) or "\\" in str(ConjCop) else (["Time:Parallel(tB,tA)"] if "|" in str(copAsym) else [])
forwardRel = "tB-tA" if "Time:After" in str(predRel) else ""
forwardConj = "tB-tA" if "Time:After" in str(predConj) else ""
if not isBackward:
pass
#print "A, \t\tB\t"+predRel+"\t|-\t(A "+copAsym.replace("t",forwardRel)+ "B)\t\t(Truth:Induction, Variables:Introduce$#)"
#print "A, \t\tB\t"+predRel+"\t|-\t(A "+copAsym.replace("t",forwardRel)+ "B)\t\t(Truth:Induction, Variables:Introduce$#)"
gen("A", "B", predRel,("A", ival(copAsym, forwardRel), "B"), ("induction", ""), "")
#print "A,\t\tB\t"+predConj+"\t|-\t("+ConjCop.replace("t",forwardConj)+" A B)\t\t(Truth:Intersection, Variables:Introduce#)"
#print "A\t\tB\t"+predRel+"\t|-\t(B "+copSym.replace("t",forwardRel)+"A)\t\t(Truth:Comparison, Variables:Introduce$#)"
else:
pass
#print "A, \t\tB\t"+predRel+"\t|-\t(B "+copAsym+"(tA-tB) A)\t(Truth:Induction, Variables:Introduce$#)"
#print "("+ConjCop+" A B)\t\t\t\t\t|-\tA\t\t\t(Truth:Deduction, Desire:Induction)"
(tParam, tParam2) = (", Time:-t" if isBackward else ", Time:+t", ", Time:+t" if isBackward else ", Time:-t")
#print "A,\t\t(A "+copAsym+" B)\t\t\t|-\tB\t\t\t(Truth:Deduction, Desire:Induction, Variables:Unify$#"+(tParam if copAsymHasTimeOffset else "")+")"
#print "B,\t\t(A "+copAsym+" B)\t\t\t|-\tA\t\t\t(Truth:Abduction, Desire:Deduction, Variables:Unify$#"+(tParam2 if copAsymHasTimeOffset else "")+")"
#if copSym != None:
# print "B,\t\t(A "+copSym+" B)\t\t\t|-\tA\t\t\t(Truth:Analogy, Desire:Strong, Variables:Unify$#)"
for cop in [copAsym,copSym]:
if cop == None:
continue
copZ = ival(cop,"z")
if MinusCops != None:
if True:
gen(("A",cop,"B"),("C",copZ,"B"), [], ((MinusCops[1],"A","C"),cop,"B"), ("difference", ""), "")
gen(("A",cop,"B"),("A",copZ,"C"), [], ("B",cop,(MinusCops[0],"A","C")), ("difference", ""), "")
gen(("S",cop,"M"),((MinusCops[1],"S","P"),copZ,"B"), [],("P",cop,"M"), ("decomposePNP", ""), "")
gen(("S",cop,"M"),((MinusCops[1],"P","S"),copZ,"B"), [],("P",cop,"M"), ("decomposeNNN", ""), "")
gen(("M",cop,"S"),("M",copZ,(MinusCops[0],"S","P")), [],("M",cop,"P"), ("decomposePNP", ""), "")
gen(("M",cop,"S"),("M",copZ,(MinusCops[0],"P","S")), [],("M",cop,"P"), ("decomposeNNN", ""), "")
for cop in
import copy
import os
import time
from PIL import Image
spot_width = 100
spot_height = 100
spots_left_coords = []
spots_top_coords = []
black_pieces_on_black_tiles = []
white_pieces_on_black_tiles = []
black_pieces_on_white_tiles = []
white_pieces_on_white_tiles = []
black_tile_tmp = None
zero2seven = [0, 1, 2, 3, 4, 5, 6, 7]
def resize_pieces_on_tiles():
global spot_width
global spot_height
global black_pieces_on_black_tiles
global white_pieces_on_black_tiles
global black_pieces_on_white_tiles
global white_pieces_on_white_tiles
bob = []
for img in black_pieces_on_black_tiles:
bob.append(img.resize((int(spot_width), int(spot_height))))
black_pieces_on_black_tiles = bob
wob = []
for img in white_pieces_on_black_tiles:
wob.append(img.resize((int(spot_width), int(spot_height))))
white_pieces_on_black_tiles = wob
bow = []
for img in black_pieces_on_white_tiles:
bow.append(img.resize((int(spot_width), int(spot_height))))
black_pieces_on_white_tiles = bow
wow = []
global black_tile_tmp
# black_tile_tmp is an untouched copy of the black tile; prepending it keeps this
# list aligned with the extra 'empty_on_black' label at the front of the label
# list in recognize_position(). Only prepend once, otherwise repeated calls
# would grow the list and shift every label.
if len(white_pieces_on_white_tiles) == 7:  # [white tile] + 6 pieces, not yet prepended
    white_pieces_on_white_tiles = [black_tile_tmp] + white_pieces_on_white_tiles
for img in white_pieces_on_white_tiles:
    wow.append(img.resize((int(spot_width), int(spot_height))))
white_pieces_on_white_tiles = wow
def preprocess_pieces_and_tiles(path):
global spot_width
global spot_height
global black_pieces_on_black_tiles
global white_pieces_on_black_tiles
global black_pieces_on_white_tiles
global white_pieces_on_white_tiles
pieces_path = os.path.join(path, 'pieces')
tiles_path = os.path.join(path, 'tiles')
black_tile_path = os.path.join(tiles_path, 'black.png')
white_tile_path = os.path.join(tiles_path, 'white.png')
black_pieces_path = os.path.join(pieces_path, 'black')
white_pieces_path = os.path.join(pieces_path, 'white')
pieces_names = ['pawn', 'bishop', 'knight', 'rook', 'queen', 'king']
black_pieces = [os.path.join(black_pieces_path, piece_name + '.png') for piece_name in pieces_names]
white_pieces = [os.path.join(white_pieces_path, piece_name + '.png') for piece_name in pieces_names]
black_tile_img = Image.open(black_tile_path)
global black_tile_tmp
black_tile_tmp = copy.deepcopy(black_tile_img)
white_tile_img = Image.open(white_tile_path)
black_pieces_on_black_tiles = [black_tile_img]
black_pieces_on_white_tiles = [white_tile_img]
for black_piece in black_pieces:
black_piece_img = Image.open(black_piece)
assert black_piece_img.size == black_tile_img.size == white_tile_img.size
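# band 1 is the alpha channel for 'LA' images, band 3 for 'RGBA'; converted to
# mode '1' it becomes a binary mask for compositing the piece onto the tile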
mask = black_piece_img.split()[1 if black_piece_img.mode == 'LA' else 3].convert('1')
piece_on_black_tile = Image.composite(black_piece_img, black_tile_img, mask)
piece_on_white_tile = Image.composite(black_piece_img, white_tile_img, mask)
black_pieces_on_black_tiles.append(piece_on_black_tile)
black_pieces_on_white_tiles.append(piece_on_white_tile)
white_pieces_on_black_tiles = [black_tile_img]
white_pieces_on_white_tiles = [white_tile_img]
for white_piece in white_pieces:
white_piece_img = Image.open(white_piece)
assert white_piece_img.size == black_tile_img.size == white_tile_img.size
mask = white_piece_img.split()[1 if white_piece_img.mode == 'LA' else 3].convert('1')
piece_on_black_tile = Image.composite(white_piece_img, black_tile_img, mask)
piece_on_white_tile = Image.composite(white_piece_img, white_tile_img, mask)
white_pieces_on_black_tiles.append(piece_on_black_tile)
white_pieces_on_white_tiles.append(piece_on_white_tile)
def show_field(board_img, i, j):
global spot_width
global spot_height
global spots_left_coords
global spots_top_coords
top = spots_top_coords[i]
left = spots_left_coords[j]
right = left + spot_width
bottom = top + spot_height
spot = board_img.crop((left, top, right, bottom))
spot.show()
def get_board_position(board_img):
global spot_width
global spot_height
global spots_left_coords
global spots_top_coords
board_position = []
for left in spots_left_coords:
for top in spots_top_coords:
right = left + spot_width
bottom = top + spot_height
spot_on_board = board_img.crop((left, top, right, bottom))
board_position.append(spot_on_board)
return board_position
def divide_board(image, board_coord):
board_image = image.crop(board_coord)
global spot_width
global spot_height
global spots_left_coords
global spots_top_coords
spot_width = board_image.width / 8
spot_height = board_image.height / 8
spots_left_coords = [0]
spots_top_coords = [0]
for i in range(7):
spots_left_coords.append(spots_left_coords[i] + spot_width)
spots_top_coords.append(spots_top_coords[i] + spot_height)
board_position = get_board_position(board_image)
return board_position
def first_part(path):
image = Image.open(path)
board_coord = image.getbbox()
return image, board_coord
def diff(J, I):
arr1 = J.convert('L')
arr2 = I.convert('L')
suma = 0
for i in range(arr2.size[0]):
for j in range(arr2.size[1]):
suma = suma + (arr2.getpixel((i, j)) - arr1.getpixel((i, j))) ** 2
return suma
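# A vectorized alternative to diff() (a sketch, assuming numpy were imported as np;
# it computes the same sum of squared grayscale differences without the Python loop):
# def diff_vectorized(J, I):
#     arr1 = np.asarray(J.convert('L'), dtype=np.int64)
#     arr2 = np.asarray(I.convert('L'), dtype=np.int64)
#     return int(((arr2 - arr1) ** 2).sum())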
def recognize_piece(spot, pieces_with_labels):
min_diff = -1.0
label_of_min = ''
# debuglist = []
for piece_with_label in pieces_with_labels:
piece = piece_with_label[0]
label = piece_with_label[1]
new_diff = diff(spot, piece)
# debuglist.append(new_diff)
if min_diff < 0 or new_diff < min_diff:
min_diff = new_diff
label_of_min = label
if -0.1 < min_diff < 0.1:
break
# print(debuglist, label_of_min if 'empty' not in label_of_min else '')
return label_of_min
def recognize_position(pos):
global white_pieces_on_white_tiles
global white_pieces_on_black_tiles
global black_pieces_on_white_tiles
global black_pieces_on_black_tiles
resize_pieces_on_tiles()
# all_pieces_on_all_tiles
apoat = white_pieces_on_white_tiles + white_pieces_on_black_tiles
apoat += black_pieces_on_white_tiles + black_pieces_on_black_tiles
piece_labels = ['pawn', 'bishop', 'knight', 'rook', 'queen', 'king']
white_pieces_labels = ['white_' + piece_name for piece_name in piece_labels]
black_pieces_labels = ['black_' + piece_name for piece_name in piece_labels]
black_on_black_labels = [piece_name + '_on_black' for piece_name in black_pieces_labels]
black_on_white_labels = [piece_name + '_on_white' for piece_name in black_pieces_labels]
white_on_black_labels = [piece_name + '_on_black' for piece_name in white_pieces_labels]
white_on_white_labels = [piece_name + '_on_white' for piece_name in white_pieces_labels]
labels = ['empty_on_black', 'empty_on_white'] + white_on_white_labels + ['empty_on_black'] + white_on_black_labels
labels += ['empty_on_white'] + black_on_white_labels + ['empty_on_black'] + black_on_black_labels
pieces_with_labels = list(zip(apoat, labels))
position_text = []
for spot in pos:
label = recognize_piece(spot, pieces_with_labels)
position_text.append(label)
return position_text
def fix_row(el):
if 'empty' in el:
return 1
if 'black_pawn' in el:
return 'p'
if 'black_bishop' in el:
return 'b'
if 'black_knight' in el:
return 'n'
if 'black_rook' in el:
return 'r'
if 'black_queen' in el:
return 'q'
if 'black_king' in el:
return 'k'
if 'white_pawn' in el:
return 'P'
if 'white_bishop' in el:
return 'B'
if 'white_knight' in el:
return 'N'
if 'white_rook' in el:
return 'R'
if 'white_queen' in el:
return 'Q'
if 'white_king' in el:
return 'K'
# print('This should never happen ', el)
# assert 1 == 0
return 1
def following_ones(row):
new = []
for i in range(len(row) - 1):
if row[i] != 1:
new.append(0)
continue
if i > 0:
if row[i - 1] == 1:
new.append(-1)
continue
counter = 0
for j in range(i + 1, len(row)):
if row[j] == 1:
counter += 1
else:
break
new.append(counter)
if row[-1] == 1 and row[-2] in [1, 2, 3, 4, 5, 6, 7, 8]:
new.append(-1)
else:
new.append(0)
return new
def group_ones(row):
fo = following_ones(row)
for i in range(len(fo)):
if row[i] == 1:
row[i] += fo[i]
while 0 in row:
row.pop(row.index(0))
return row
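# Example: a back rank with runs of empty squares,
# group_ones(['r', 1, 1, 'q', 1, 1, 1, 'k']) -> ['r', 2, 'q', 3, 'k'],
# which getFEN() below joins into the FEN fragment 'r2q3k'.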
def getFEN(pos_txt):
assert len(pos_txt) == 64
rows = []
for i in range(8):
rows.append([])
current = 0
for piece in pos_txt:
rows[current].append(piece)
current += 1
current = current % 8
fen = []
for row in rows:
fen.append(list(map(fix_row, row)))
rows = copy.deepcopy(fen)
fen = list(map(group_ones, fen))
finalFen = ''
for arr in fen:
for el in arr:
finalFen = finalFen + str(el)
finalFen = finalFen + '/'
return finalFen[:-1], rows
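# Example (hypothetical position text): for the standard starting position the
# 64 recognized labels collapse to the usual FEN piece-placement field:
# getFEN(start_pos_txt)[0] == 'rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR'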
def is_pawn_giving_a_check(rows, color):
if color == 'w' or color == 'W':
p = 'P'
p_coos = []
for i in range(len(rows)):
for j in range(len(rows[0])):
if rows[i][j] == p:
p_coos.append((i, j))
if len(p_coos) == 0:
return False
for coo in p_coos:
if coo[1] != 0:
if rows[coo[0] - 1][coo[1] - 1] == 'k':
return True
if coo[1] != 7:
if rows[coo[0] - 1][coo[1] + 1] == 'k':
return True
return False
else:
p = 'p'
assert color == 'b' or color == 'B'
p_coos = []
for i in range(len(rows)):
for j in range(len(rows[0])):
if rows[i][j] == p:
p_coos.append((i, j))
if len(p_coos) == 0:
return False
for coo in p_coos:
if coo[1] != 0:
if rows[coo[0] + 1][coo[1] - 1] == 'K':
return True
if coo[1] != 7:
if rows[coo[0] + 1][coo[1] + 1] == 'K':
return True
return False
def find_king_coo(rows, kingcolor):
if kingcolor == 'b' or kingcolor == 'B':
k = 'k'
else:
k = 'K'
for i in range(len(rows)):
for j in range(len(rows[0])):
if rows[i][j] == k:
return i, j
return 0, 0
def look_for(letter, possible_coos, rows):
global zero2seven
returning = []
for coo in possible_coos:
if coo[0] not in zero2seven or coo[1] not in zero2seven:
continue
if rows[coo[0]][coo[1]] == letter:
returning.append((coo[0], coo[1]))
if len(returning) > 0:
return returning
return None
def is_knight_giving_a_check(rows, color):
global zero2seven
if color == 'w' or color == 'W':
black_king_coo = find_king_coo(rows, 'b') # it must exist somewhere
i = black_king_coo[0]
j = black_king_coo[1]
possible_knight_coos = [(i + 2, j + 1), (i - 2, j + 1), (i + 2, j - 1), (i - 2, j - 1), (i + 1, j + 2),
(i - 1, j + 2), (i + 1, j - 2), (i - 1, j - 2)]
return False if look_for('N', possible_knight_coos, rows) is None else True
else:
white_king_coo = find_king_coo(rows, 'w') # it must exist somewhere
i = white_king_coo[0]
j = white_king_coo[1]
possible_knight_coos = [(i + 2, j + 1), (i - 2, j + 1), (i + 2, j - 1), (i - 2, j - 1), (i + 1, j + 2),
(i - 1, j + 2), (i + 1, j - 2), (i - 1, j - 2)]
return False if look_for('n', possible_knight_coos, rows) is None else True
def is_rook_giving_a_check(rows, color, queencall=False):
global zero2seven
if color == 'w' or color == 'W':
r = 'R'
if queencall:
r = 'Q'
black_king_coo = find_king_coo(rows, 'b') # it must exist somewhere
i = black_king_coo[0]
j = black_king_coo[1]
possible_rook_coos = list(zip([i, i, i, i, i, i, i, i], zero2seven)) | |
#
# Spec2Vec
#
# Copyright 2019 Netherlands eScience Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Import libraries
import numpy as np
import networkx as nx
import community
from networkx.algorithms.connectivity import minimum_st_edge_cut # , minimum_st_node_cut
from networkx.algorithms.flow import shortest_augmenting_path
import pandas as pd
from matplotlib import pyplot as plt
import matplotlib
# ----------------------------------------------------------------------------
# ---------------- Graph / networking related functions ----------------------
# ----------------------------------------------------------------------------
def create_network(similars_idx,
similars,
max_links=10,
cutoff=0.7,
link_method='single'):
"""
Function to create network from given top-n similarity values.
Args:
--------
similars_idx: numpy array
Array with indices of top-n most similar nodes.
similars: numpy array
Array with similarity values of top-n most similar nodes.
max_links: int
Maximum number of links to add per node. Default = 10.
Due to incoming links, total number of links per node can be higher.
cutoff: float
Threshold for given similarities. Edges/Links will only be made for
similarities > cutoff. Default = 0.7.
link_method: str
Choose between 'single' and 'mutual'. 'single' will add all links based
on individual nodes. 'mutual' will only add links if that link appears
in the given top-n list for both nodes.
"""
dimension = similars_idx.shape[0]
# Initialize network graph, add nodes
msnet = nx.Graph()
msnet.add_nodes_from(np.arange(0, dimension))
# Add edges based on global threshold for weights
for i in range(0, dimension):
idx = np.where(similars[i, :] > cutoff)[0][:max_links]
if link_method == "single":
new_edges = [(i, int(similars_idx[i, x]), float(similars[i, x]))
for x in idx if similars_idx[i, x] != i]
elif link_method == "mutual":
new_edges = [(i, int(similars_idx[i, x]), float(similars[i, x]))
for x in idx
if similars_idx[i, x] != i and i in similars_idx[x, :]
]
else:
raise ValueError("Link method not known; choose 'single' or 'mutual'.")
msnet.add_weighted_edges_from(new_edges)
return msnet
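# Example usage (a sketch; similars_idx/similars would come from a top-n
# similarity search, e.g. the indices and scores of the 10 nearest neighbours
# per spectrum):
# msnet = create_network(similars_idx, similars, max_links=10,
#                        cutoff=0.7, link_method='mutual')
# print(msnet.number_of_nodes(), msnet.number_of_edges())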
def sample_cuts(graph, max_steps=1000, max_cuts=1):
""" Function to help find critical links in the given graph.
Critical links here are links which -once removed- would disconnect considerable
parts of the network. Those links are searched for by counting minimum cuts between
a large number of node pairs (up to max_steps pairs will be explored).
If more pairs exist than max_steps allows to explore, pick max_steps random pairs.
Args:
-------
graph: networkx graph
Graph of individual cluster (created using networkx).
max_steps
Up to max_steps pairs will be explored to search for cuts. Default = 1000.
max_cuts
Maximum numbers of links allowed to be cut. Default = 1.
"""
num_nodes = graph.number_of_nodes()
# num_edges = graph.number_of_edges()
# Make list of all pairs within graph
nodes = np.array(graph.nodes)
pairs = np.array(np.meshgrid(nodes, nodes)).T
remove_diagonal = np.array([(i * num_nodes + i) for i in range(num_nodes)])
pairs = np.delete(pairs.reshape(-1, 2), remove_diagonal, axis=0)
sampled_cuts = []
if pairs.shape[0] <= max_steps:
max_steps = pairs.shape[0]
else:
# If more pairs exist than max_steps allows to explore, pick max_steps random pairs.
choices = np.random.choice(np.arange(pairs.shape[0]),
max_steps,
replace=False)
pairs = pairs[choices, :]
for pair in pairs:
cuts = minimum_st_edge_cut(graph,
pair[0],
pair[1],
flow_func=shortest_augmenting_path)
# nx.node_connectivity(graphs[4], 592, 376)
# cuts = nx.minimum_st_edge_cut(graph, pair[0], pair[1])
# cuts = nx.minimum_edge_cut(graph, pair[0], pair[1])#, flow_func=shortest_augmenting_path)
if len(cuts) <= max_cuts:
sampled_cuts.append(cuts)
return sampled_cuts
def weak_link_finder(graph, max_steps=1000, max_cuts=1):
""" Function to detect critical links in the given graph.
Critical links here are links which -once removed- would disconnect considerable
parts of the network. Those links are searched for by counting minimum cuts between
a large number of node pairs (up to max_steps pairs will be explored).
If more pairs exist than max_steps allows to explore, pick max_steps random pairs.
Args:
-------
graph: networkx graph
Graph of individual cluster (created using networkx).
max_steps
Up to max_steps pairs will be explored to search for cuts. Default = 1000.
max_cuts
Maximum numbers of links allowed to be cut. Default = 1.
"""
sampled_cuts = sample_cuts(graph, max_steps=max_steps, max_cuts=max_cuts)
sampled_cuts_len = [len(x) for x in sampled_cuts]
proposed_cuts = []
for min_cuts in list(set(sampled_cuts_len)):
sampled_cuts_select = [
list(x)[:min_cuts] for x in sampled_cuts if len(x) == min_cuts
]
sampled_cuts_select = np.array(sampled_cuts_select)
# Sort array
if min_cuts > 1:
sampled_cuts_select = np.sort(np.sort(sampled_cuts_select, axis=2),
axis=1)
else:
sampled_cuts_select = np.sort(sampled_cuts_select, axis=2)
# Find unique cuts and count occurrences
cuts_unique, cuts_count = row_counts(
sampled_cuts_select.reshape(-1, min_cuts * 2))
# Return most promising cuts
proposed_cuts.append((min_cuts, cuts_unique, cuts_count))
return proposed_cuts
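# Example usage (sketch; assumes row_counts() returns unique rows and their
# counts as two arrays, as used above):
# proposed = weak_link_finder(cluster_graph, max_steps=1000, max_cuts=1)
# for min_cuts, cuts_unique, cuts_count in proposed:
#     print(min_cuts, cuts_unique[np.argmax(cuts_count)])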
def dilate_cluster(graph_main,
similars_idx,
similars,
max_cluster_size=100,
min_cluster_size=10,
max_per_node=1,
max_per_cluster=None,
min_weight=0.5):
""" Add more links to clusters that are < min_cluster_size.
This function is in particular made to avoid small remaining clusters or singletons.
Will only add links if they won't lead to clusters > max_cluster_size,
and if the links have weights > min_weight.
Starts iteratively from highest weight links that are not yet part of the network
(out of given top-n links).
Args:
--------
graph_main: networkx graph
Graph, e.g. made using create_network() function. Based on networkx.
similars_idx: numpy array
Array with indices of top-n most similar nodes.
similars: numpy array
Array with similarity values of top-n most similar nodes.
max_cluster_size: int
Maximum desired size of clusters. Default = 100.
min_cluster_size: int
Minimum desired size of clusters. Default = 10.
max_per_node: int
Only add up to max_per_node new links per node. Default = 1.
max_per_cluster: int, None
Only add up to max_per_cluster new links per cluster. Ignored if set to None. Default = None.
min_weight: float
Set minimum weight to be considered for making link. Default = 0.5.
"""
links_added = []
# Split graph into separate clusters
# nx.connected_component_subgraphs was removed in networkx 2.4; build the
# per-cluster subgraphs explicitly instead
graphs = [graph_main.subgraph(c).copy() for c in nx.connected_components(graph_main)]
for graph in graphs:
cluster_size = len(graph.nodes)
if cluster_size < min_cluster_size:
best_scores = []
potential_links = []
for ID in graph.nodes:
nodes_connected = []
for key in graph[ID].keys():
nodes_connected.append(key)
potential_new_links = [(i, x)
for i, x in enumerate(similars_idx[ID])
if x not in nodes_connected and x != ID]
best_score_arr = similars[ID][[
x[0] for x in potential_new_links
]]
select = np.where(
best_score_arr >= min_weight)[0][:max_per_node]
# if best_score >= min_weight:
if select.shape[0] > 0:
for s in select:
best_scores.append(best_score_arr[s])
potential_link = (ID,
[x[1]
for x in potential_new_links][s])
potential_links.append(potential_link)
if max_per_cluster is None:
selected_candidates = np.argsort(best_scores)[::-1]
else:
# Only add the top max_addition ones
selected_candidates = np.argsort(
best_scores)[::-1][:max_per_cluster]
for ID in selected_candidates:
# node_id = list(graph.nodes)[ID]
node_id = potential_links[ID][0]
# Only add link if no cluster > max_cluster_size is formed by it
if (len(
nx.node_connected_component(graph_main,
potential_links[ID][1])) +
cluster_size) <= max_cluster_size:
# Actual adding of new links
graph_main.add_edge(node_id,
potential_links[ID][1],
weight=best_scores[ID])
links_added.append((node_id, potential_links[ID][1]))
# Update cluster_size to keep track of growing clusters
cluster_size = len(
nx.node_connected_component(graph_main,
potential_links[ID][1]))
return graph_main, links_added
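# Example usage (sketch): grow clusters smaller than 10 nodes by at most one
# extra link per node, never exceeding 100 nodes per cluster:
# graph_main, links_added = dilate_cluster(graph_main, similars_idx, similars,
#                                          max_cluster_size=100,
#                                          min_cluster_size=10)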
def erode_clusters(graph_main, max_cluster_size=100, keep_weights_above=0.8):
""" Remove links from clusters that are > max_cluster_size.
This function is in particular made to break up clusters that grew too large.
Will iteratively remove the weakest link of an oversized cluster, as long as its
weight is below keep_weights_above, until the cluster is <= max_cluster_size.
Args:
--------
graph_main: networkx graph
Graph, e.g. made using create_network() function. Based on networkx.
max_cluster_size: int
Maximum desired size of clusters. Default = 100.
keep_weights_above: float
Set threshold above which weights will not be removed. Default = 0.8.
"""
links_removed = []
# Split graph into separate clusters
graphs = [graph_main.subgraph(c).copy() for c in nx.connected_components(graph_main)]
for graph in graphs:
cluster_size = len(graph.nodes)
while cluster_size > max_cluster_size:
edges = list(graph.edges)
edges_weights = np.array(
[graph[x[0]][x[1]]['weight'] for x in edges])
weakest_edge = edges_weights.argsort()[0]
if edges_weights[weakest_edge] < keep_weights_above:
print("Remove edge:", edges[weakest_edge][0],
edges[weakest_edge][1])
graph.remove_edge(edges[weakest_edge][0],
edges[weakest_edge][1])
graph_main.remove_edge(edges[weakest_edge][0],
edges[weakest_edge][1])
links_removed.append(edges[weakest_edge])
else:
# No removable link below keep_weights_above is left; stop to avoid looping forever
break
# If link removal caused split of cluster:
if not nx.is_connected(graph):
subgraphs = [graph.subgraph(c).copy() for c in nx.connected_components(graph)]
print("Getting from cluster with", len(graph.nodes),
"nodes, to clusters with",
[len(x.nodes) for x in subgraphs], "nodes.")
idx1 = np.argmax([len(x.nodes) for x in subgraphs])
graph = subgraphs[idx1] # keep largest subcluster here
cluster_size = len(graph.nodes)
return graph_main, links_removed
def add_intra_cluster_links(graph_main, m_sim, min_weight=0.5, max_links=20):
""" Add links within each separate cluster if | |
# ### Stochlite Pybullet Environment
# Last Update by <NAME> (May, 2021)
import numpy as np
import gym
from gym import spaces
import envs.environments.stoch_env.trajectory_generator as trajectory_generator
import math
import random
from collections import deque
import pybullet
import envs.environments.stoch_env.bullet_client as bullet_client
import pybullet_data
import envs.environments.stoch_env.planeEstimation.get_terrain_normal as normal_estimator
import matplotlib.pyplot as plt
from envs.environments.stoch_env.utils.logger import DataLog
import os
# LEG_POSITION = ["fl_", "bl_", "fr_", "br_"]
# KNEE_CONSTRAINT_POINT_RIGHT = [0.014, 0, 0.076] #hip
# KNEE_CONSTRAINT_POINT_LEFT = [0.0,0.0,-0.077] #knee
RENDER_HEIGHT = 720
RENDER_WIDTH = 960
PI = np.pi
class StochliteEnv(gym.Env):
def __init__(self,
render = False,
on_rack = False,
gait = 'trot',
phase = [0, PI, PI,0],#[FR, FL, BR, BL]
action_dim = 20,
end_steps = 1000,
stairs = False,
downhill =False,
seed_value = 100,
wedge = False,
IMU_Noise = False,
deg = 11): # deg = 5
self._is_stairs = stairs
self._is_wedge = wedge
self._is_render = render
self._on_rack = on_rack
self.rh_along_normal = 0.24
self.seed_value = seed_value
random.seed(self.seed_value)
if self._is_render:
self._pybullet_client = bullet_client.BulletClient(connection_mode=pybullet.GUI)
else:
self._pybullet_client = bullet_client.BulletClient()
# self._theta = 0
self._frequency = 2.5 # originally 2.5, changing for stability
self.termination_steps = end_steps
self.downhill = downhill
#PD gains
self._kp = 400
self._kd = 10
self.dt = 0.01
self._frame_skip = 50
self._n_steps = 0
self._action_dim = action_dim
self._obs_dim = 8
self.action = np.zeros(self._action_dim)
self._last_base_position = [0, 0, 0]
self.last_rpy = [0, 0, 0]
self._distance_limit = float("inf")
self.current_com_height = 0.25 # 0.243
#wedge_parameters
self.wedge_start = 0.5
self.wedge_halflength = 2
if gait == 'trot':
phase = [0, PI, PI, 0]
elif gait == 'walk':
phase = [0, PI, 3*PI/2, PI/2]
self._trajgen = trajectory_generator.TrajectoryGenerator(gait_type=gait, phase=phase)
self.inverse = False
self._cam_dist = 1.0
self._cam_yaw = 0.0
self._cam_pitch = 0.0
self.avg_vel_per_step = 0
self.avg_omega_per_step = 0
self.linearV = 0
self.angV = 0
self.prev_vel=[0,0,0]
self.prev_ang_vels = [0, 0, 0] # roll_vel, pitch_vel, yaw_vel of prev step
self.total_power = 0
self.x_f = 0
self.y_f = 0
self.clips=7
self.friction = 0.6
# self.ori_history_length = 3
# self.ori_history_queue = deque([0]*3*self.ori_history_length,
# maxlen=3*self.ori_history_length)#observation queue
self.step_disp = deque([0]*100, maxlen=100)
self.stride = 5
self.incline_deg = deg
self.incline_ori = 0
self.prev_incline_vec = (0,0,1)
self.terrain_pitch = []
self.add_IMU_noise = IMU_Noise
self.INIT_POSITION =[0,0,0.3] # [0,0,0.3], Spawning stochlite higher to remove initial drift
self.INIT_ORIENTATION = [0, 0, 0, 1]
self.support_plane_estimated_pitch = 0
self.support_plane_estimated_roll = 0
self.pertub_steps = 0
self.x_f = 0
self.y_f = 0
## Gym env related mandatory variables
self._obs_dim = 8 #[roll, pitch, roll_vel, pitch_vel, yaw_vel, SP roll, SP pitch, cmd_xvel, cmd_yvel, cmd_avel]
observation_high = np.array([np.pi/2] * self._obs_dim)
observation_low = -observation_high
self.observation_space = spaces.Box(observation_low, observation_high)
action_high = np.array([1] * self._action_dim)
self.action_space = spaces.Box(-action_high, action_high)
self.commands = np.array([0, 0, 0]) #Joystick commands consisting of cmd_x_velocity, cmd_y_velocity, cmd_ang_velocity
self.max_linear_xvel = 0.5 #0.4, made zero for only ang vel # calculation is < 0.2 m steplength times the frequency 2.5 Hz
self.max_linear_yvel = 0.25 #0.25, made zero for only ang vel # calculation is < 0.14 m times the frequency 2.5 Hz
self.max_ang_vel = 2 #considering less than pi/2 steer angle # less than one complete rotation in one second
self.max_steplength = 0.2 # by the kinematic limits of the robot
self.max_steer_angle = PI/2 #plus minus PI/2 rads
self.max_x_shift = 0.1 #plus minus 0.1 m
self.max_y_shift = 0.14 # max 30 degree abduction
self.max_z_shift = 0.1 # plus minus 0.1 m
self.max_incline = 15 # in deg
self.robot_length = 0.334 # measured from stochlite
self.robot_width = 0.192 # measured from stochlite
self.hard_reset()
self.Set_Randomization(default=True, idx1=2, idx2=2)
self.logger = DataLog()
if(self._is_stairs):
boxHalfLength = 0.1
boxHalfWidth = 1
boxHalfHeight = 0.015
sh_colBox = self._pybullet_client.createCollisionShape(self._pybullet_client.GEOM_BOX,halfExtents=[boxHalfLength,boxHalfWidth,boxHalfHeight])
boxOrigin = 0.3
n_steps = 15
self.stairs = []
for i in range(n_steps):
step =self._pybullet_client.createMultiBody(baseMass=0,baseCollisionShapeIndex = sh_colBox,basePosition = [boxOrigin + i*2*boxHalfLength,0,boxHalfHeight + i*2*boxHalfHeight],baseOrientation=[0.0,0.0,0.0,1])
self.stairs.append(step)
self._pybullet_client.changeDynamics(step, -1, lateralFriction=0.8)
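# Minimal usage sketch (names as defined in this file; rendering disabled):
# env = StochliteEnv(render=False, gait='trot')
# obs = env.reset()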
def hard_reset(self):
'''
Function to
1) Set simulation parameters which remain constant throughout the experiments
2) Load the URDFs of the plane, wedge and robot in their initial conditions
'''
self._pybullet_client.resetSimulation()
self._pybullet_client.setPhysicsEngineParameter(numSolverIterations=int(300))
self._pybullet_client.setTimeStep(self.dt/self._frame_skip)
self.plane = self._pybullet_client.loadURDF("%s/plane.urdf" % pybullet_data.getDataPath())
self._pybullet_client.changeVisualShape(self.plane,-1,rgbaColor=[1,1,1,0.9])
self._pybullet_client.setGravity(0, 0, -9.8)
if self._is_wedge:
wedge_halfheight_offset = 0.01
self.wedge_halfheight = wedge_halfheight_offset + 1.5 * math.tan(math.radians(self.incline_deg)) / 2.0
self.wedgePos = [0, 0, self.wedge_halfheight]
self.wedgeOrientation = self._pybullet_client.getQuaternionFromEuler([0, 0, self.incline_ori])
if not (self.downhill):
wedge_model_path = "envs/environments/stoch_env/Wedges/uphill/urdf/wedge_" + str(
self.incline_deg) + ".urdf"
self.INIT_ORIENTATION = self._pybullet_client.getQuaternionFromEuler(
[math.radians(self.incline_deg) * math.sin(self.incline_ori),
-math.radians(self.incline_deg) * math.cos(self.incline_ori), 0])
self.robot_landing_height = wedge_halfheight_offset + 0.28 + math.tan(
math.radians(self.incline_deg)) * abs(self.wedge_start)
# self.INIT_POSITION = [self.INIT_POSITION[0], self.INIT_POSITION[1], self.robot_landing_height]
self.INIT_POSITION = [-0.8, 0.0, 0.38] #[-0.8, 0, self.robot_landing_height]
else:
wedge_model_path = "envs/environments/stoch_env/Wedges/downhill/urdf/wedge_" + str(
self.incline_deg) + ".urdf"
self.robot_landing_height = wedge_halfheight_offset + 0.28 + math.tan(
math.radians(self.incline_deg)) * 1.5
self.INIT_POSITION = [0, 0, self.robot_landing_height] # [0.5, 0.7, 0.3] #[-0.5,-0.5,0.3]
self.INIT_ORIENTATION = [0, 0, 0, 1] #[ 0, -0.0998334, 0, 0.9950042 ]
self.wedge = self._pybullet_client.loadURDF(wedge_model_path, self.wedgePos, self.wedgeOrientation)
self.SetWedgeFriction(0.7)
model_path = os.path.join(os.getcwd(), 'envs/environments/stoch_env/robots/stochlite/stochlite_description/urdf/stochlite_urdf.urdf')
self.stochlite = self._pybullet_client.loadURDF(model_path, self.INIT_POSITION,self.INIT_ORIENTATION)
self._joint_name_to_id, self._motor_id_list = self.BuildMotorIdList()
num_legs = 4
for i in range(num_legs):
self.ResetLeg(i, add_constraint=True)
self.ResetPoseForAbd()
if self._on_rack:
self._pybullet_client.createConstraint(
self.stochlite, -1, -1, -1, self._pybullet_client.JOINT_FIXED,
[0, 0, 0], [0, 0, 0], [0, 0, 0.4])
self._pybullet_client.resetBasePositionAndOrientation(self.stochlite, self.INIT_POSITION, self.INIT_ORIENTATION)
self._pybullet_client.resetBaseVelocity(self.stochlite, [0, 0, 0], [0, 0, 0])
self._pybullet_client.resetDebugVisualizerCamera(self._cam_dist, self._cam_yaw, self._cam_pitch, [0, 0, 0])
self.SetFootFriction(self.friction)
# self.SetLinkMass(0,0)
# self.SetLinkMass(11,0)
def reset_standing_position(self):
num_legs = 4
for i in range(num_legs):
self.ResetLeg(i, add_constraint=False, standstilltorque=10)
self.ResetPoseForAbd()
# Conditions for standstill
for i in range(300):
self._pybullet_client.stepSimulation()
for i in range(num_legs):
self.ResetLeg(i, add_constraint=False, standstilltorque=0)
def reset(self):
'''
This function resets the environment
Note : Set_Randomization() is called before reset() to either randomize or set environment in default conditions.
'''
# self._theta = 0
self._last_base_position = [0, 0, 0]
self.commands = [0, 0, 0]
self.last_rpy = [0, 0, 0]
self.inverse = False
if self._is_wedge:
self._pybullet_client.removeBody(self.wedge)
wedge_halfheight_offset = 0.01
self.wedge_halfheight = wedge_halfheight_offset + 1.5 * math.tan(math.radians(self.incline_deg)) / 2.0
self.wedgePos = [0, 0, self.wedge_halfheight]
self.wedgeOrientation = self._pybullet_client.getQuaternionFromEuler([0, 0, self.incline_ori])
if not (self.downhill):
wedge_model_path = "envs/environments/stoch_env/Wedges/uphill/urdf/wedge_" + str(self.incline_deg) + ".urdf"
self.INIT_ORIENTATION = self._pybullet_client.getQuaternionFromEuler(
[math.radians(self.incline_deg) * math.sin(self.incline_ori),
-math.radians(self.incline_deg) * math.cos(self.incline_ori), 0])
self.robot_landing_height = wedge_halfheight_offset + 0.28 + math.tan(math.radians(self.incline_deg)) * abs(self.wedge_start)
# self.INIT_POSITION = [self.INIT_POSITION[0], self.INIT_POSITION[1], self.robot_landing_height]
self.INIT_POSITION = [-0.8, 0.0, 0.38] #[-0.8, 0, self.robot_landing_height]
else:
wedge_model_path = "envs/environments/stoch_env/Wedges/downhill/urdf/wedge_" + str(self.incline_deg) + ".urdf"
self.robot_landing_height = wedge_halfheight_offset + 0.28 + math.tan(math.radians(self.incline_deg)) * 1.5
self.INIT_POSITION = [0, 0, self.robot_landing_height] # [0.5, 0.7, 0.3] #[-0.5,-0.5,0.3]
self.INIT_ORIENTATION = [0, 0, 0, 1]
self.wedge = self._pybullet_client.loadURDF(wedge_model_path, self.wedgePos, self.wedgeOrientation)
self.SetWedgeFriction(0.7)
self._pybullet_client.resetBasePositionAndOrientation(self.stochlite, self.INIT_POSITION, self.INIT_ORIENTATION)
self._pybullet_client.resetBaseVelocity(self.stochlite, [0, 0, 0], [0, 0, 0])
self.reset_standing_position()
self._pybullet_client.resetDebugVisualizerCamera(self._cam_dist, self._cam_yaw, self._cam_pitch, [0, 0, 0])
self._n_steps = 0
return self.GetObservation()
'''
Old Joy-stick Emulation Function
def updateCommands(self, num_plays, episode_length):
ratio = num_plays/episode_length
if num_plays < 0.2 * episode_length:
self.commands = [0, 0, 0]
elif num_plays < 0.8 * episode_length:
self.commands = np.array([self.max_linear_xvel, self.max_linear_yvel, self.max_ang_vel])*ratio
else:
self.commands = [self.max_linear_xvel, self.max_linear_yvel, self.max_ang_vel]
# self.commands = np.array([self.max_linear_xvel, self.max_linear_yvel, self.max_ang_vel])*ratio
'''
def apply_Ext_Force(self, x_f, y_f, link_index=1, visualize=False, life_time=0.01):
'''
Function to apply an external force on the robot
Args:
x_f : external force in x direction
y_f : external force in y direction
link_index : link index of the robot where the force needs to be applied
visualize : bool, whether to visualize the external force with arrow symbols
life_time : life time of the visualization
'''
force_applied = [x_f,y_f,0]
self._pybullet_client.applyExternalForce(self.stochlite, link_index, forceObj=[x_f,y_f,0],posObj=[0,0,0],flags=self._pybullet_client.LINK_FRAME)
f_mag = np.linalg.norm(np.array(force_applied))
if(visualize and f_mag != 0.0):
point_of_force = self._pybullet_client.getLinkState(self.stochlite, link_index)[0]
lam = 1/(2*f_mag)
dummy_pt = [point_of_force[0]-lam*force_applied[0],
point_of_force[1]-lam*force_applied[1],
point_of_force[2]-lam*force_applied[2]]
self._pybullet_client.addUserDebugText(str(round(f_mag,2))+" N",dummy_pt,[0.13,0.54,0.13],textSize=2,lifeTime=life_time)
self._pybullet_client.addUserDebugLine(point_of_force,dummy_pt,[0,0,1],3,lifeTime=life_time)
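# Example (sketch, using the parameters above): push the robot sideways with
# 60 N for one control step and draw the debug arrow for 0.1 s:
# env.apply_Ext_Force(0, 60, link_index=1, visualize=True, life_time=0.1)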
def SetLinkMass(self,link_idx,mass=0):
'''
Function to add extra mass to front and back link of the robot
Args:
link_idx : link index of the robot whose mass needs to be modified
mass : value of extra mass to be added
Ret:
new_mass : mass of the link after addition
Note : Presently, this function supports addition of masses in the front and back link only (0, 11)
'''
link_mass = self._pybullet_client.getDynamicsInfo(self.stochlite,link_idx)[0]
if(link_idx==0):
link_mass = mass # mass + 1.1
self._pybullet_client.changeDynamics(self.stochlite, 0, mass=link_mass)
elif(link_idx==11):
link_mass = mass # mass + 1.1
self._pybullet_client.changeDynamics(self.stochlite, 11, mass=link_mass)
return link_mass
def getlinkmass(self,link_idx):
'''
function to retrieve mass of any link
Args:
link_idx : link index of the robot
Ret:
m[0] : mass of the link
'''
m = self._pybullet_client.getDynamicsInfo(self.stochlite,link_idx)
return m[0]
def Set_Randomization(self, default = True, idx1 = 0, idx2=0, idx3=2, idx0=0, idx11=0, idxc=2, idxp=0, deg = 5, ori = 0): # deg = 5, changed for stochlite
'''
This function helps in randomizing the physical and dynamics parameters of the environment to robustify the policy.
These parameters include wedge incline, wedge orientation, friction, mass of links, motor strength and external perturbation force.
Note : If the default argument is True, this function sets the above-mentioned parameters in a user-defined manner
'''
if default:
frc=[0.5,0.6,0.8]
# extra_link_mass=[0,0.05,0.1,0.15]
cli=[5.2,6,7,8]
# pertub_range = [0, -30, 30, -60, 60]
self.pertub_steps = 150
self.x_f = 0
# self.y_f = pertub_range[idxp]
self.incline_deg = deg + 2*idx1
# self.incline_ori = ori + PI/12*idx2
self.new_fric_val =frc[idx3]
self.friction = self.SetFootFriction(self.new_fric_val)
# self.FrontMass = self.SetLinkMass(0,extra_link_mass[idx0])
# self.BackMass = self.SetLinkMass(11,extra_link_mass[idx11])
self.clips = cli[idxc]
else:
avail_deg = [5, 7, 9, 11, 13]
# avail_ori = [-PI/2, PI/2]
# extra_link_mass=[0,.05,0.1,0.15]
# pertub_range = [0, -30, 30, -60, 60]
cli=[5,6,7,8]
self.pertub_steps = 150 #random.randint(90,200) #Keeping fixed for now
self.x_f = 0
# self.y_f = pertub_range[random.randint(0,2)]
self.incline_deg = avail_deg[random.randint(0, 4)]
# self.incline_ori = avail_ori[random.randint(0, 1)] #(PI/12)*random.randint(0, 4) #resolution of 15 degree, changed for stochlite
self.new_fric_val = np.round(np.clip(np.random.normal(0.6,0.08),0.55,0.8),2)
self.friction = self.SetFootFriction(self.new_fric_val)
# i=random.randint(0,3)
# self.FrontMass = self.SetLinkMass(0,extra_link_mass[i])
# i=random.randint(0,3)
# self.BackMass = self.SetLinkMass(11,extra_link_mass[i])
self.clips = np.round(np.clip(np.random.normal(6.5,0.4),5,8),2)
def randomize_only_inclines(self, default=True, idx1=0, idx2=0, deg = 5, ori = 0): # deg = 5, changed for stochlite
'''
This function only randomizes the wedge incline and orientation and is called during training without Domain Randomization
'''
if default:
self.incline_deg = deg + 2 * idx1
# self.incline_ori = ori + PI / 12 * idx2
else:
avail_deg = [5, 7, 9, 11, 13]
# avail_ori = [-PI/2, PI/2]
self.incline_deg = avail_deg[random.randint(0, 4)]
# self.incline_ori = avail_ori[random.randint(0, 1)] #(PI / 12) * random.randint(0, 4) # resolution of 15 degree
def boundYshift(self, x, y):
'''
This function bounds Y shift with respect to current X shift
Args:
x : absolute X-shift
y : Y-Shift
Ret :
y : bounded Y-shift
'''
if x > 0.5619:
if y | |
# Repository: dcartman/pygame-menu
# File: test/test_widget_textinput.py
"""
pygame-menu
https://github.com/ppizarror/pygame-menu
TEST WIDGET - TEXTINPUT
Test TextInput and ColorInput widgets.
"""
__all__ = ['TextInputWidgetTest']
from test._utils import MenuUtils, surface, PygameEventUtils, TEST_THEME, PYGAME_V2, \
BaseTest
import pygame
import pygame_menu
import pygame_menu.controls as ctrl
from pygame_menu.widgets.core.widget import WidgetTransformationNotImplemented
class TextInputWidgetTest(BaseTest):
# noinspection SpellCheckingInspection,PyTypeChecker
def test_textinput(self) -> None:
"""
Test TextInput widget.
"""
menu = MenuUtils.generic_menu()
# Assert bad settings
self.assertRaises(ValueError,
lambda: menu.add.text_input('title',
input_type=pygame_menu.locals.INPUT_FLOAT,
default='bad'))
self.assertRaises(ValueError, # Default and password cannot coexist
lambda: menu.add.text_input('title',
password=True,
default='bad'))
# Create text input widget
textinput = menu.add.text_input('title', input_underline='_')
textinput.set_value('new_value') # No error
textinput._selected = False
textinput.draw(surface)
textinput.select(update_menu=True)
textinput.draw(surface)
self.assertEqual(textinput.get_value(), 'new_value')
textinput.clear()
self.assertEqual(textinput.get_value(), '')
# Create selection box
string = 'the text'
textinput._cursor_render = True
textinput.set_value(string)
textinput._select_all()
self.assertEqual(textinput._get_selected_text(), 'the text')
textinput.draw(surface)
textinput._unselect_text()
textinput.draw(surface)
# Assert events
textinput.update(PygameEventUtils.key(0, keydown=True, testmode=False))
PygameEventUtils.test_widget_key_press(textinput)
textinput.update(PygameEventUtils.key(ctrl.KEY_APPLY, keydown=True))
textinput.update(PygameEventUtils.key(pygame.K_LSHIFT, keydown=True))
textinput.clear()
# Type
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='t'))
textinput.update(PygameEventUtils.key(pygame.K_e, keydown=True, char='e'))
textinput.update(PygameEventUtils.key(pygame.K_s, keydown=True, char='s'))
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='t'))
# Keyup
textinput.update(PygameEventUtils.key(pygame.K_a, keyup=True, char='a'))
self.assertEqual(textinput.get_value(), 'test') # The text we typed
# Ctrl events
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_c)) # copy
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_v)) # paste
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_z)) # undo
self.assertEqual(textinput.get_value(), 'tes')
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_y)) # redo
self.assertEqual(textinput.get_value(), 'test')
textinput._select_all()
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_x)) # cut
self.assertEqual(textinput.get_value(), '')
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_z)) # undo
self.assertEqual(textinput.get_value(), 'test')
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_y)) # redo
self.assertEqual(textinput.get_value(), '')
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_z)) # undo
self.assertEqual(textinput.get_value(), 'test')
# Test ignore ctrl events
textinput._copy_paste_enabled = False
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_c)))
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_v)))
max_history = textinput._max_history
textinput._max_history = 0
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_z)))
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_y)))
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_x)))
textinput._selection_enabled = False
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_a)))
self.assertFalse(textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_r))) # invalid
# Reset
textinput._copy_paste_enabled = True
textinput._max_history = max_history
textinput._selection_enabled = True
# Test selection, if user selects all and types anything the selected
# text must be destroyed
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_a)) # select all
textinput._unselect_text()
self.assertEqual(textinput._get_selected_text(), '')
textinput._select_all()
self.assertEqual(textinput._get_selected_text(), 'test')
textinput._unselect_text()
self.assertEqual(textinput._get_selected_text(), '')
textinput.update(PygameEventUtils.keydown_mod_ctrl(pygame.K_a))
self.assertEqual(textinput._get_selected_text(), 'test')
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='t'))
textinput._select_all()
self.assertTrue(textinput.update(PygameEventUtils.key(pygame.K_ESCAPE, keydown=True)))
textinput._select_all()
self.assertTrue(textinput.update(PygameEventUtils.key(pygame.K_BACKSPACE, keydown=True)))
self.assertEqual(textinput.get_value(), '')
textinput.set_value('t')
# Releasing shift disable selection
textinput._selection_active = True
textinput.update(PygameEventUtils.key(pygame.K_LSHIFT, keyup=True))
self.assertFalse(textinput._selection_active)
# Arrows while selection
textinput._select_all()
self.assertIsNotNone(textinput._selection_surface)
textinput.update(PygameEventUtils.key(pygame.K_LEFT, keydown=True))
self.assertIsNone(textinput._selection_surface)
textinput._select_all()
self.assertIsNotNone(textinput._selection_surface)
textinput.update(PygameEventUtils.key(pygame.K_RIGHT, keydown=True))
self.assertIsNone(textinput._selection_surface)
textinput._select_all()
textinput._selection_active = True
self.assertEqual(textinput._selection_box, [0, 1])
textinput.update(PygameEventUtils.key(pygame.K_LEFT, keydown=True))
self.assertEqual(textinput._selection_box, [0, 0])
textinput._select_all()
textinput._selection_active = True
textinput.update(PygameEventUtils.key(pygame.K_RIGHT, keydown=True))
self.assertEqual(textinput._selection_box, [0, 1])
# Remove while selection
textinput._select_all()
textinput.update(PygameEventUtils.key(pygame.K_DELETE, keydown=True))
self.assertEqual(textinput.get_value(), '')
textinput.set_value('t')
# Now the value must be t
self.assertEqual(textinput._get_selected_text(), '')
self.assertEqual(textinput.get_value(), 't')
# Test readonly
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='k'))
self.assertEqual(textinput.get_value(), 'tk')
textinput.readonly = True
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='k'))
self.assertEqual(textinput.get_value(), 'tk')
textinput.readonly = False
# Test keyup
self.assertIn(pygame.K_t, textinput._keyrepeat_counters.keys())
self.assertFalse(textinput.update(
PygameEventUtils.key(pygame.K_t, keyup=True, char='1')))
self.assertNotIn(pygame.K_t, textinput._keyrepeat_counters.keys())
# Test tab
self.assertEqual(textinput._tab_size, 4)
textinput.update(PygameEventUtils.key(pygame.K_TAB, keydown=True))
self.assertEqual(textinput.get_value(), 'tk ')
# Test invalid unicode
self.assertFalse(textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True)))
# Up/Down disable active status
textinput.active = True
textinput.update(PygameEventUtils.key(ctrl.KEY_MOVE_UP, keydown=True))
self.assertFalse(textinput.active)
textinput.active = True
textinput.update(PygameEventUtils.key(ctrl.KEY_MOVE_DOWN, keydown=True))
self.assertFalse(textinput.active)
textinput.active = True
self.assertTrue(textinput.update(PygameEventUtils.key(pygame.K_ESCAPE, keydown=True)))
self.assertFalse(textinput.active)
# Test mouse
textinput._selected = True
textinput._selection_time = 0
textinput.update(PygameEventUtils.middle_rect_click(textinput))
self.assertTrue(textinput._cursor_visible)
textinput._select_all()
textinput._selection_active = True
self.assertEqual(textinput._cursor_position, 6)
self.assertEqual(textinput._selection_box, [0, 6])
textinput.update(PygameEventUtils.middle_rect_click(textinput, evtype=pygame.MOUSEBUTTONDOWN))
self.assertEqual(textinput._selection_box, [0, 0])
# Check click pos
textinput._check_mouse_collide_input(PygameEventUtils.middle_rect_click(textinput)[0].pos)
self.assertEqual(textinput._cursor_position, 6)
# Test touch
textinput._cursor_position = 0
textinput._check_touch_collide_input(PygameEventUtils.middle_rect_click(textinput)[0].pos)
self.assertEqual(textinput._cursor_position, 6)
# Update mouse
for i in range(50):
textinput.update(PygameEventUtils.key(pygame.K_t, keydown=True, char='t'))
textinput._update_cursor_mouse(50)
textinput._cursor_render = True
textinput._render_cursor()
# Test multiple are selected
menu.add.text_input('title', password=True, input_underline='_').select()
self.assertRaises(pygame_menu.menu._MenuMultipleSelectedWidgetsException, lambda: menu.draw(surface))
textinput.clear()
textinput.select(update_menu=True)
menu.draw(surface)
# Clear the menu
self.assertEqual(menu._stats.removed_widgets, 0)
self.assertEqual(textinput.get_menu(), menu)
menu.clear()
self.assertIsNone(textinput.get_menu())
self.assertEqual(menu._stats.removed_widgets, 3)
menu.add.generic_widget(textinput)
self.assertEqual(textinput.get_menu(), menu)
menu.clear()
self.assertEqual(menu._stats.removed_widgets, 4)
def test_password(self) -> None:
"""
Test password.
"""
menu = MenuUtils.generic_menu()
password_input = menu.add.text_input('title', password=True, input_underline='_')
self.assertRaises(ValueError, # Password cannot be set
lambda: password_input.set_value('new_value'))
password_input.set_value('') # No error
password_input._selected = False
password_input.draw(surface)
password_input.select(update_menu=True)
password_input.draw(surface)
self.assertEqual(password_input.get_value(), '')
password_input.clear()
self.assertEqual(password_input.get_value(), '')
# Test none width password
password_input._password_char = ''
self.assertRaises(ValueError, lambda: password_input._apply_font())
def test_unicode(self) -> None:
"""
Test unicode support.
"""
menu = MenuUtils.generic_menu()
textinput = menu.add.text_input('title', input_underline='_')
textinput.set_value('tk')
# Test alt+x
textinput.update(PygameEventUtils.key(pygame.K_SPACE, keydown=True))
textinput.update(PygameEventUtils.key(pygame.K_2, keydown=True, char='2'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='5'))
self.assertEqual(textinput.get_value(), 'tk 215')
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x)) # convert 215 to unicode
self.assertEqual(textinput.get_value(), 'tkȕ')
textinput.update(PygameEventUtils.key(pygame.K_SPACE, keydown=True))
textinput.update(PygameEventUtils.key(pygame.K_SPACE, keydown=True))
textinput.update(PygameEventUtils.key(pygame.K_b, keydown=True, char='B'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x)) # convert 215 to unicode
self.assertEqual(textinput.get_value(), 'tkȕ ±')
# Remove all
textinput.clear()
textinput.update(PygameEventUtils.key(pygame.K_b, keydown=True, char='B'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x)) # convert 215 to unicode
self.assertEqual(textinput.get_value(), '±')
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x)) # convert same to unicode, do nothing
self.assertEqual(textinput.get_value(), '±')
# Test consecutive
textinput.update(PygameEventUtils.key(pygame.K_2, keydown=True, char='2'))
textinput.update(PygameEventUtils.key(pygame.K_0, keydown=True, char='0'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
textinput.update(PygameEventUtils.key(pygame.K_3, keydown=True, char='3'))
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x)) # convert 215 to unicode
self.assertEqual(textinput.get_value(), '±–')
# Test 0x
textinput.clear()
PygameEventUtils.release_key_mod()
textinput.update(PygameEventUtils.key(pygame.K_0, keydown=True, char='0'))
self.assertEqual(textinput.get_value(), '0')
textinput.update(PygameEventUtils.key(pygame.K_x, keydown=True, char='x'))
self.assertEqual(textinput.get_value(), '0x')
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x))
self.assertEqual(textinput.get_value(), '0x')
textinput.update(PygameEventUtils.key(pygame.K_b, keydown=True, char='B'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
self.assertEqual(textinput.get_value(), '0xB1')
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x))
self.assertEqual(textinput.get_value(), '±')
PygameEventUtils.release_key_mod()
textinput.update(PygameEventUtils.key(pygame.K_0, keydown=True, char='0'))
textinput.update(PygameEventUtils.key(pygame.K_x, keydown=True, char='x'))
textinput.update(PygameEventUtils.key(pygame.K_b, keydown=True, char='B'))
textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True, char='1'))
textinput.update(PygameEventUtils.keydown_mod_alt(pygame.K_x))
self.assertEqual(textinput.get_value(), '±±')
# Test keyup
self.assertIn(pygame.K_1, textinput._keyrepeat_counters.keys())
self.assertFalse(textinput.update(
PygameEventUtils.key(pygame.K_1, keyup=True, char='1')))
self.assertNotIn(pygame.K_1, textinput._keyrepeat_counters.keys())
# Test tab
self.assertEqual(textinput._tab_size, 4)
textinput.update(PygameEventUtils.key(pygame.K_TAB, keydown=True))
self.assertEqual(textinput.get_value(), '±± ')
# Test invalid unicode
self.assertFalse(textinput.update(PygameEventUtils.key(pygame.K_1, keydown=True)))
# Test others
textinput._input_type = 'other'
self.assertTrue(textinput._check_input_type('-'))
self.assertFalse(textinput._check_input_type('x'))
textinput._maxwidth_update = None
self.assertIsNone(textinput._update_maxlimit_renderbox())
def test_undo_redo(self) -> None:
"""
Test undo/redo.
"""
menu = MenuUtils.generic_menu()
# Test maxchar and undo/redo
textinput = menu.add.text_input('title',
input_underline='_',
maxchar=20)
textinput.set_value('the size of this textinput is way greater than the limit')
self.assertEqual(textinput.get_value(), 'eater than the limit') # same as maxchar
self.assertEqual(textinput._cursor_position, 20)
textinput._undo() # This must set default at ''
self.assertEqual(textinput.get_value(), '')
textinput._redo()
self.assertEqual(textinput.get_value(), 'eater than the limit')
textinput.draw(surface)
textinput._copy()
textinput._paste()
textinput._block_copy_paste = False
textinput._select_all()
textinput._cut()
self.assertEqual(textinput.get_value(), '')
textinput._undo()
self.assertEqual(textinput.get_value(), 'eater than the limit')
self.assertEqual(textinput._history_index, 1)
textinput._history_index = 0
self.assertFalse(textinput._undo())
textinput._history_index = len(textinput._history) - 1
self.assertFalse(textinput._redo())
def test_copy_paste(self) -> None:
"""
Test copy/paste.
"""
menu = MenuUtils.generic_menu()
# Test copy/paste
textinput_nocopy = menu.add.text_input('title',
input_underline='_',
maxwidth=20,
copy_paste_enable=False)
textinput_nocopy.set_value('this cannot be copied')
textinput_nocopy._copy()
textinput_nocopy._paste()
textinput_nocopy._cut()
self.assertEqual(textinput_nocopy.get_value(), 'this cannot be copied')
# Test copy/paste without block
textinput_copy = menu.add.text_input('title',
input_underline='_',
maxwidth=20,
maxchar=20)
textinput_copy.set_value('this value should be cropped as this is longer than the max char')
self.assertFalse(textinput_copy._block_copy_paste)
textinput_copy._copy()
self.assertTrue(textinput_copy._block_copy_paste)
textinput_copy._block_copy_paste = False
textinput_copy._select_all()
textinput_copy._cut()
self.assertEqual(textinput_copy.get_value(), '')
textinput_copy._block_copy_paste = False
textinput_copy._paste()
# self.assertEqual(textinput_copy.get_value(), 'er than the max char')
textinput_copy._cut()
textinput_copy._block_copy_paste = False
# self.assertEqual(textinput_copy.get_value(), '')
textinput_copy._valid_chars = ['e', 'r']
textinput_copy._paste()
# Copy password
textinput_copy._password = True
self.assertFalse(textinput_copy._copy())
def test_overflow_removal(self) -> None:
"""
Test text with max width and right overflow removal.
"""
menu = MenuUtils.generic_menu()
menu._copy_theme()
menu._theme.widget_font_size = 20
textinput = menu.add.text_input(
'Some long text: ',
maxwidth=19,
textinput_id='long_text',
input_underline='_'
)
self.assertRaises(WidgetTransformationNotImplemented, lambda: textinput.resize())
self.assertRaises(WidgetTransformationNotImplemented, lambda: textinput.set_max_width())
self.assertRaises(WidgetTransformationNotImplemented, lambda: textinput.set_max_height())
self.assertRaises(WidgetTransformationNotImplemented, lambda: textinput.scale())
self.assertRaises(WidgetTransformationNotImplemented, lambda: textinput.rotate())
textinput.flip(True, True)
self.assertEqual(textinput._flip, (False, True))
# noinspection SpellCheckingInspection
textinput.set_value('aaaaaaaaaaaaaaaaaaaaaaaaaa')
self.assertEqual(textinput._cursor_position, 26)
self.assertEqual(textinput._renderbox, [1, 26, 25])
textinput.update(PygameEventUtils.key(pygame.K_BACKSPACE, keydown=True))
self.assertEqual(textinput._cursor_position, 25)
self.assertEqual(textinput._renderbox, [0, 25, 25])
textinput.update(PygameEventUtils.key(pygame.K_a, keydown=True, char='a'))
self.assertEqual(textinput._cursor_position, 26)
self.assertEqual(textinput._renderbox, [1, 26, 25])
textinput.update(PygameEventUtils.key(pygame.K_BACKSPACE, keydown=True))
self.assertEqual(textinput._cursor_position, 25)
self.assertEqual(textinput._renderbox, [0, 25, 25])
# noinspection PyTypeChecker
def test_textinput_underline(self) -> None:
"""
Test underline.
"""
# Test underline edge cases
theme = TEST_THEME.copy()
theme.title_font_size = 35
theme.widget_font_size = 25
menu = pygame_menu.Menu(
column_min_width=400,
height=300,
theme=theme,
title='Label',
onclose=pygame_menu.events.CLOSE,
width=400
)
textinput = menu.add.text_input('title', input_underline='_')
self.assertEqual(menu._widget_offset[1], 107 if PYGAME_V2 else 106)
self.assertEqual(textinput.get_width(), 376)
self.assertEqual(textinput._current_underline_string, '______________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (376, 400))
self.assertEqual(textinput.get_width(), 376)
self.assertEqual(textinput._current_underline_string, '______________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (376, 400))
textinput.set_title('nice')
self.assertEqual(textinput.get_width(), 379)
self.assertEqual(textinput._current_underline_string, '______________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (379, 400))
# noinspection SpellCheckingInspection
textinput.set_value('QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ')
self.assertEqual(textinput.get_width(), 712)
self.assertEqual(textinput._current_underline_string,
'____________________________________________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (712, 400))
textinput.set_padding(100)
self.assertEqual(textinput.get_width(), 912)
self.assertEqual(textinput._current_underline_string,
'____________________________________________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (912, 380))
textinput.set_padding(200)
self.assertEqual(textinput.get_width(), 1112)
self.assertEqual(textinput._current_underline_string,
'____________________________________________________________')
menu.render()
self.assertEqual((menu.get_width(widget=True), menu.get_width(inner=True)), (1112, 380))
# Test underline
textinput = menu.add.text_input('title: ')
textinput.set_value('this is a test value')
self.assertEqual(textinput.get_width(), 266)
menu.clear()
textinput = menu.add.text_input('title: ', input_underline='.-')
# noinspection SpellCheckingInspection
textinput.set_value('QQQQQQQQQQQQQQQ')
self.assertEqual(textinput.get_width(), 373)
self.assertEqual(textinput._current_underline_string, '.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-.-')
textinput = menu.add.text_input('title: ', input_underline='_', input_underline_len=10)
self.assertEqual(textinput._current_underline_string, '_' * 10)
# Text underline with different column widths
menu = pygame_menu.Menu(
column_max_width=200,
height=300,
theme=theme,
title='Label',
onclose=pygame_menu.events.CLOSE,
width=400
)
self.assertRaises(pygame_menu.menu._MenuSizingException, lambda: menu.add.frame_v(300, 100))
self.assertRaises(pygame_menu.menu._MenuSizingException, lambda: menu.add.frame_v(201, 100))
self.assertEqual(len(menu._widgets), 0)
textinput = menu.add.text_input('title', input_underline='_')
self.assertEqual(menu._widget_offset[1], 107 if PYGAME_V2 else 106)
self.assertEqual(textinput.get_width(), 178)
self.assertEqual(textinput._current_underline_string, '____________')
v_frame = menu.add.frame_v(150, 100, background_color=(20, 20, 20))
v_frame.pack(textinput)
self.assertEqual(menu._widget_offset[1], 76 if PYGAME_V2 else 75)
self.assertEqual(textinput.get_width(), 134)
self.assertEqual(textinput._current_underline_string, '________')
# Test cursor size
self.assertRaises(AssertionError, lambda: menu.add.text_input('title', cursor_size=(1, 0)))
self.assertRaises(AssertionError, lambda: menu.add.text_input('title', cursor_size=(-1, -1)))
self.assertRaises(AssertionError, lambda: menu.add.text_input('title', cursor_size=(1, 1, 0)))
self.assertRaises(AssertionError, lambda: menu.add.text_input('title', cursor_size=[1, 1]))
self.assertRaises(AssertionError, lambda: menu.add.text_input('title', cursor_size=(1.6, 2.5)))
textinput_cursor = menu.add.text_input('title', cursor_size=(10, 2))
self.assertEqual(textinput_cursor._cursor_size, (10, 2))
# noinspection PyArgumentEqualDefault,PyTypeChecker
def test_colorinput(self) -> None:
"""
Test ColorInput widget.
"""
def _assert_invalid_color(widg) -> None:
"""
Assert that the widget color is invalid.
:param widg: Widget object
"""
r, g, b | |
arg):
try: value = float(arg)
except: value = None
if self.__get_windowLength() != value:
self._needsUpdate = True
self.obj.setWindowLength(value)
windowLength = property(__get_windowLength, __set_windowLength)
# package: QualityControl
class base_outage(object):
def __init__(self, obj):
self.obj = obj
self._needsUpdate = False
def _sync_update(self):
if self._needsUpdate:
self.obj.lastModified = Core.Time.GMT()
self.obj.update()
self._needsUpdate = False
def _delete(self):
self.obj.detach()
def __get_last_modified(self):
return datetime.datetime(
*(time.strptime(
self.obj.lastModified.toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
last_modified = property(__get_last_modified)
def __get_networkCode(self):
try:
return self.obj.waveformID().networkCode()
except ValueError:
return None
def __get_stationCode(self):
try:
return self.obj.waveformID().stationCode()
except ValueError:
return None
def __get_streamCode(self):
try:
return self.obj.waveformID().channelCode()
except ValueError:
return None
def __get_locationCode(self):
try:
return self.obj.waveformID().locationCode()
except ValueError:
return None
def __set_networkCode(self, arg):
if self.__get_networkCode() != arg:
self._needsUpdate = True
self.obj.waveformID().setNetworkCode(arg)
def __set_stationCode(self, arg):
if self.__get_stationCode() != arg:
self._needsUpdate = True
self.obj.waveformID().setStationCode(arg)
def __set_streamCode(self, arg):
if self.__get_streamCode() != arg:
self._needsUpdate = True
self.obj.waveformID().setChannelCode(arg)
def __set_locationCode(self, arg):
if self.__get_locationCode() != arg:
self._needsUpdate = True
self.obj.waveformID().setLocationCode(arg)
networkCode = property(__get_networkCode, __set_networkCode)
stationCode = property(__get_stationCode, __set_stationCode)
streamCode = property(__get_streamCode, __set_streamCode)
locationCode = property(__get_locationCode, __set_locationCode)
def __get_creatorID(self):
try: # @return: const std::string&
return py2unicode(self.obj.creatorID())
except ValueError:
return None
def __set_creatorID(self, arg):
try:
value = py2str(arg)
except Exception as e:
logs.error(str(e))
return
if self.__get_creatorID() != value:
self._needsUpdate = True
self.obj.setCreatorID(value)
creatorID = property(__get_creatorID, __set_creatorID)
def __get_created(self):
try: # @return: Seiscomp::Core::Time
return datetime.datetime(
*(time.strptime(
self.obj.created().toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
except ValueError:
return None
def __set_created(self, arg):
value = None
if arg is not None:
try: value = Core.Time.FromString(py2str(arg), "%Y-%m-%d %H:%M:%S")
except: pass
if py2str(self.__get_created()) != py2str(arg):
self._needsUpdate = True
self.obj.setCreated(value)
created = property(__get_created, __set_created)
def __get_start(self):
try: # @return: Seiscomp::Core::Time
return datetime.datetime(
*(time.strptime(
self.obj.start().toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
except ValueError:
return None
def __set_start(self, arg):
value = None
if arg is not None:
try: value = Core.Time.FromString(py2str(arg), "%Y-%m-%d %H:%M:%S")
            except Exception: pass
if py2str(self.__get_start()) != py2str(arg):
self._needsUpdate = True
self.obj.setStart(value)
start = property(__get_start, __set_start)
def __get_end(self):
# optional Attribute
try: # @return: Seiscomp::Core::Time
return datetime.datetime(
*(time.strptime(
self.obj.end().toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
except ValueError:
return None
def __set_end(self, arg):
value = None
if arg is not None:
try: value = Core.Time.FromString(py2str(arg), "%Y-%m-%d %H:%M:%S")
            except Exception: pass
if py2str(self.__get_end()) != py2str(arg):
self._needsUpdate = True
self.obj.setEnd(value)
end = property(__get_end, __set_end)
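# Usage sketch for the class above (assuming `outage` is an existing
# seiscomp3 DataModel.Outage instance; the attribute values are hypothetical):
#
#     o = base_outage(outage)
#     o.networkCode = "GE"      # setter flags o._needsUpdate
#     o.end = None              # optional attributes accept None
#     o._sync_update()          # stamps lastModified and calls obj.update()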
# package: QualityControl
class base_qualitycontrol(object):
def __init__(self, obj):
self.obj = obj
self._needsUpdate = False
def _sync_update(self):
if self._needsUpdate:
self.obj.lastModified = Core.Time.GMT()
self.obj.update()
self._needsUpdate = False
def _delete(self):
self.obj.detach()
def __get_last_modified(self):
return datetime.datetime(
*(time.strptime(
self.obj.lastModified.toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
last_modified = property(__get_last_modified)
def __get_publicID(self):
return self.obj.publicID()
def __set_publicID(self, arg):
if self.__get_publicID() != arg:
self._needsUpdate = True
self.obj.setPublicID(arg)
publicID = property(__get_publicID,__set_publicID)
def _new_qclog(self, **args):
publicID = args.get("publicID")
if publicID and DataModel.QCLog.Find(publicID): publicID = None
if publicID: obj = DataModel.QCLog.Create(publicID)
else: obj = DataModel.QCLog.Create()
try: obj.setWaveformID(args["waveformID"])
except KeyError: pass
try: obj.setCreatorID(args["creatorID"])
except KeyError: pass
try:
if args["created"] is None:
obj.setCreated(None)
else:
obj.setCreated(Core.Time.FromString(str(args["created"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try: obj.setStart(Core.Time.FromString(str(args["start"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try:
if args["end"] is None:
obj.setEnd(None)
else:
obj.setEnd(Core.Time.FromString(str(args["end"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try: obj.setMessage(args["message"])
except KeyError: pass
if not self.obj.add(obj):
print("seiscomp3.DataModel.QualityControl: error adding QCLog")
return obj
    def __get_qclog(self):
        objects = []
        if dbQuery is None:
            if self.obj.qCLogCount():
                for i in range(self.obj.qCLogCount()):
                    obj = self.obj.qCLog(i)
                    obj.lastModified = Core.Time.GMT()
                    objects.append(base_qclog(obj))
        else:
            # HACK to make last_modified usable ...
            it = dbQuery.getObjects(self.obj, DataModel.QCLog.TypeInfo())
            while it.get():
                try:
                    obj = DataModel.QCLog.Cast(it.get())
                    obj.lastModified = it.lastModified()
                    objects.append(base_qclog(obj))
                except ValueError as e:
                    print(str(e))
                it.step()
        return objects
_qCLog = property(__get_qclog)
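    # When dbQuery is set, the getter above iterates over the database so each
    # object's real lastModified timestamp is available; without a database it
    # falls back to the in-memory child list and stamps the current GMT time.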
def _new_waveformquality(self, **args):
        obj = DataModel.WaveformQuality()
try: obj.setWaveformID(args["waveformID"])
except KeyError: pass
try: obj.setCreatorID(args["creatorID"])
except KeyError: pass
try:
if args["created"] is None:
obj.setCreated(None)
else:
obj.setCreated(Core.Time.FromString(str(args["created"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try: obj.setStart(Core.Time.FromString(str(args["start"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try:
if args["end"] is None:
obj.setEnd(None)
else:
obj.setEnd(Core.Time.FromString(str(args["end"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try: obj.setType(args["type"])
except KeyError: pass
try: obj.setParameter(args["parameter"])
except KeyError: pass
try: obj.setValue(args["value"])
except KeyError: pass
try: obj.setLowerUncertainty(args["lowerUncertainty"])
except KeyError: pass
try: obj.setUpperUncertainty(args["upperUncertainty"])
except KeyError: pass
try: obj.setWindowLength(args["windowLength"])
except KeyError: pass
if not self.obj.add(obj):
print("seiscomp3.DataModel.QualityControl: error adding WaveformQuality")
return obj
    def __get_waveformquality(self):
        objects = []
        if dbQuery is None:
            if self.obj.waveformQualityCount():
                for i in range(self.obj.waveformQualityCount()):
                    obj = self.obj.waveformQuality(i)
                    obj.lastModified = Core.Time.GMT()
                    objects.append(base_waveformquality(obj))
        else:
            # HACK to make last_modified usable ...
            i = 0
            objects_left = self.obj.waveformQualityCount()
            while objects_left > 0:
                try:
                    obj = self.obj.waveformQuality(i)
                    try:
                        obj.lastModified = self.obj.lastModified
                        objects.append(base_waveformquality(obj))
                        objects_left -= 1
                    except AttributeError:
                        try:
                            obj.lastModified = Core.Time.GMT()
                            objects.append(base_waveformquality(obj))
                            objects_left -= 1
                        except Exception:
                            logs.debug("got " + repr(obj) + " in __get_waveformquality(), objects_left=" + str(objects_left))
                    i += 1
                except ValueError as e:
                    print(str(e))
        return objects
_waveformQuality = property(__get_waveformquality)
def _new_outage(self, **args):
        obj = DataModel.Outage()
try: obj.setWaveformID(args["waveformID"])
except KeyError: pass
try: obj.setCreatorID(args["creatorID"])
except KeyError: pass
try:
if args["created"] is None:
obj.setCreated(None)
else:
obj.setCreated(Core.Time.FromString(str(args["created"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try: obj.setStart(Core.Time.FromString(str(args["start"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
try:
if args["end"] is None:
obj.setEnd(None)
else:
obj.setEnd(Core.Time.FromString(str(args["end"]), "%Y-%m-%d %H:%M:%S"))
except KeyError: pass
if not self.obj.add(obj):
print("seiscomp3.DataModel.QualityControl: error adding Outage")
return obj
    def __get_outage(self):
        objects = []
        if dbQuery is None:
            if self.obj.outageCount():
                for i in range(self.obj.outageCount()):
                    obj = self.obj.outage(i)
                    obj.lastModified = Core.Time.GMT()
                    objects.append(base_outage(obj))
        else:
            # HACK to make last_modified usable ...
            i = 0
            objects_left = self.obj.outageCount()
            while objects_left > 0:
                try:
                    obj = self.obj.outage(i)
                    try:
                        obj.lastModified = self.obj.lastModified
                        objects.append(base_outage(obj))
                        objects_left -= 1
                    except AttributeError:
                        try:
                            obj.lastModified = Core.Time.GMT()
                            objects.append(base_outage(obj))
                            objects_left -= 1
                        except Exception:
                            logs.debug("got " + repr(obj) + " in __get_outage(), objects_left=" + str(objects_left))
                    i += 1
                except ValueError as e:
                    print(str(e))
        return objects
_outage = property(__get_outage)
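    # Usage sketch for the keyword-based factory above (values hypothetical):
    #
    #     qc = base_qualitycontrol(qc_obj)
    #     out = qc._new_outage(waveformID=wfid,
    #                          start="2020-01-01 00:00:00",
    #                          end=None)    # end=None leaves the outage open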
# package: Inventory
class base_stationreference(object):
def __init__(self, obj):
self.obj = obj
self._needsUpdate = False
def _sync_update(self):
if self._needsUpdate:
self.obj.lastModified = Core.Time.GMT()
self.obj.update()
self._needsUpdate = False
def _delete(self):
self.obj.detach()
def __get_last_modified(self):
return datetime.datetime(
*(time.strptime(
self.obj.lastModified.toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
last_modified = property(__get_last_modified)
def __get_stationID(self):
try: # @return: const std::string&
return py2unicode(self.obj.stationID())
except ValueError:
return None
def __set_stationID(self, arg):
try:
value = py2str(arg)
except Exception as e:
logs.error(str(e))
return
if self.__get_stationID() != value:
self._needsUpdate = True
self.obj.setStationID(value)
stationID = property(__get_stationID, __set_stationID)
# package: Inventory
class base_stationgroup(object):
def __init__(self, obj):
self.obj = obj
self._needsUpdate = False
def _sync_update(self):
if self._needsUpdate:
self.obj.lastModified = Core.Time.GMT()
self.obj.update()
self._needsUpdate = False
def _delete(self):
self.obj.detach()
def __get_last_modified(self):
return datetime.datetime(
*(time.strptime(
self.obj.lastModified.toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
last_modified = property(__get_last_modified)
def __get_publicID(self):
return self.obj.publicID()
def __set_publicID(self, arg):
if self.__get_publicID() != arg:
self._needsUpdate = True
self.obj.setPublicID(arg)
publicID = property(__get_publicID,__set_publicID)
def __get_type(self):
# optional Attribute
try: # @return: StationGroupType
return self.obj.type()
except ValueError:
return None
def __set_type(self, arg):
if self.__get_type() != arg:
self._needsUpdate = True
self.obj.setType(arg)
type = property(__get_type, __set_type)
def __get_code(self):
try: # @return: const std::string&
return py2unicode(self.obj.code())
except ValueError:
return None
def __set_code(self, arg):
try:
value = py2str(arg)
except Exception as e:
logs.error(str(e))
return
if self.__get_code() != value:
self._needsUpdate = True
self.obj.setCode(value)
code = property(__get_code, __set_code)
def __get_start(self):
# optional Attribute
try: # @return: Seiscomp::Core::Time
return datetime.datetime(
*(time.strptime(
self.obj.start().toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
except ValueError:
return None
def __set_start(self, arg):
value = None
if arg is not None:
try: value = Core.Time.FromString(py2str(arg), "%Y-%m-%d %H:%M:%S")
            except Exception: pass
if py2str(self.__get_start()) != py2str(arg):
self._needsUpdate = True
self.obj.setStart(value)
start = property(__get_start, __set_start)
def __get_end(self):
# optional Attribute
try: # @return: Seiscomp::Core::Time
return datetime.datetime(
*(time.strptime(
self.obj.end().toString("%Y-%m-%dT%H:%M:%SZ"),
"%Y-%m-%dT%H:%M:%SZ")[0:6]
)
)
except ValueError:
return None
def __set_end(self, arg):
value = None
if arg is not None:
try: value = Core.Time.FromString(py2str(arg), "%Y-%m-%d %H:%M:%S")
            except Exception: pass
if py2str(self.__get_end()) != py2str(arg):
self._needsUpdate = True
self.obj.setEnd(value)
end = property(__get_end, __set_end)
def __get_description(self):
try: # @return: const std::string&
return py2unicode(self.obj.description())
except ValueError:
return None
def __set_description(self, arg):
try:
value = py2str(arg)
except Exception as e:
logs.error(str(e))
return
if self.__get_description() != value:
self._needsUpdate = True
self.obj.setDescription(value)
description = property(__get_description, __set_description)
def __get_latitude(self):
# optional Attribute
try: # @return: double
return self.obj.latitude()
except ValueError:
return None
def __set_latitude(self, arg):
try: value = float(arg)
        except (TypeError, ValueError): value = None
if self.__get_latitude() != value:
self._needsUpdate = True
self.obj.setLatitude(value)
latitude = property(__get_latitude, __set_latitude)
def __get_longitude(self):
# optional Attribute
try: # @return: double
return self.obj.longitude()
except ValueError:
return None
def __set_longitude(self, arg):
try: value = float(arg)
        except (TypeError, ValueError): value = None
        if self.__get_longitude() != value:
            self._needsUpdate = True
            self.obj.setLongitude(value)
    longitude = property(__get_longitude, __set_longitude)
= frappe.db.get_list('Quotation', filters={'party_name': name}, fields=['name'])
#opportunity_name = frappe.db.get_list('Opportunity', filters={'party_name': name}, fields=['name'])
#sales_order_name = frappe.db.get_list('Sales Order', filters={'customer': name}, fields=['name'])
#delivery_note_name = frappe.db.get_list('Delivery Note', filters={'customer': name}, fields=['name'])
#sales_invoice_name = frappe.db.get_list('Sales Invoice', filters={'customer': name}, fields=['name'])
#payment_entry_name = frappe.db.get_list('Payment Entry', filters={'party': name}, fields=['name'])
qtn_connections = {}
opp_connections = {}
so_connections = {}
dn_connections = {}
sinv_connections = {}
pe_connections = {}
connections = []
if quotation_count > 0 and doc_data:
qtn_connections['name'] = "Quotation"
qtn_connections['count'] = quotation_count
qtn_connections['icon'] = "https://erpcloud.systems/icons/quotation.png"
connections.append(qtn_connections)
if opportunity_count > 0 and doc_data:
opp_connections['name'] = "Opportunity"
opp_connections['count'] = opportunity_count
opp_connections['icon'] = "https://erpcloud.systems/icons/opportunity.png"
connections.append(opp_connections)
if sales_order_count > 0 and doc_data:
so_connections['name'] = "Sales Order"
so_connections['count'] = sales_order_count
so_connections['icon'] = "https://erpcloud.systems/icons/sales_order.png"
connections.append(so_connections)
if delivery_note_count > 0 and doc_data:
dn_connections['name'] = "Delivery Note"
dn_connections['count'] = delivery_note_count
dn_connections['icon'] = "https://erpcloud.systems/icons/delivery_note.png"
connections.append(dn_connections)
if sales_invoice_count > 0 and doc_data:
sinv_connections['name'] = "Sales Invoice"
sinv_connections['count'] = sales_invoice_count
sinv_connections['icon'] = "https://erpcloud.systems/icons/sales_invoice.png"
connections.append(sinv_connections)
if payment_entry_count > 0 and doc_data:
pe_connections['name'] = "Payment Entry"
pe_connections['count'] = payment_entry_count
pe_connections['icon'] = "https://erpcloud.systems/icons/payment_entry.png"
connections.append(pe_connections)
cust['conn'] = connections
if doc_data:
return cust
else:
return "لا يوجد عميل بهذا الاسم"
@frappe.whitelist()
def sales_order(name):
so = {}
doc_data = frappe.db.get_list('Sales Order', filters={'name': name},
fields=['name',
'customer',
'customer_name',
'transaction_date',
'delivery_date',
'status',
'tax_id',
'customer_group',
'territory',
'customer_address',
'address_display',
'contact_display',
'contact_mobile',
'contact_email',
'project',
'order_type',
'currency',
'conversion_rate',
'selling_price_list',
'price_list_currency',
'plc_conversion_rate',
'ignore_pricing_rule',
'set_warehouse',
'campaign',
'source',
'tc_name',
'terms',
'taxes_and_charges',
'payment_terms_template',
'sales_partner',
'commission_rate',
'total_commission',
'total_qty',
'base_total',
'base_net_total',
'total',
'net_total',
'base_total_taxes_and_charges',
'total_taxes_and_charges',
'apply_discount_on',
'base_discount_amount',
'additional_discount_percentage',
'discount_amount',
'base_grand_total',
'base_in_words',
'grand_total',
'in_words',
'docstatus'
])
for x in doc_data:
so['name'] = x.name
so['customer'] = x.customer
so['customer_name'] = x.customer_name
so['transaction_date'] = x.transaction_date
so['delivery_date'] = x.delivery_date
so['status'] = x.status
        so['tax_id'] = x.tax_id
so['customer_group'] = x.customer_group
so['territory'] = x.territory
so['customer_address'] = x.customer_address
so['address_display'] = x.address_display
so['contact_display'] = x.contact_display
so['contact_mobile'] = x.contact_mobile
so['contact_email'] = x.contact_email
so['project'] = x.project
so['order_type'] = x.order_type
so['currency'] = x.currency
so['conversion_rate'] = x.conversion_rate
so['selling_price_list'] = x.selling_price_list
so['price_list_currency'] = x.price_list_currency
so['plc_conversion_rate'] = x.plc_conversion_rate
so['set_warehouse'] = x.set_warehouse
so['campaign'] = x.campaign
so['source'] = x.source
so['tc_name'] = x.tc_name
so['terms'] = x.terms
so['taxes_and_charges'] = x.taxes_and_charges
so['payment_terms_template'] = x.payment_terms_template
so['sales_partner'] = x.sales_partner
so['commission_rate'] = x.commission_rate
so['total_commission'] = x.total_commission
so['total_qty'] = x.total_qty
so['base_total'] = x.base_total
so['base_net_total'] = x.base_net_total
so['total'] = x.total
so['net_total'] = x.net_total
so['base_total_taxes_and_charges'] = x.base_total_taxes_and_charges
so['total_taxes_and_charges'] = x.total_taxes_and_charges
so['apply_discount_on'] = x.apply_discount_on
so['base_discount_amount'] = x.base_discount_amount
so['additional_discount_percentage'] = x.additional_discount_percentage
so['discount_amount'] = x.discount_amount
so['base_grand_total'] = x.base_grand_total
so['base_in_words'] = x.base_in_words
so['grand_total'] = x.grand_total
so['in_words'] = x.in_words
so['docstatus'] = x.docstatus
child_data_1 = frappe.db.get_list('Sales Order Item', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'delivery_date',
'item_code',
'item_name',
'description',
'item_group',
'brand',
'image',
'qty',
'stock_uom',
'uom',
'conversion_factor',
'stock_qty',
'price_list_rate',
'base_price_list_rate',
'margin_type',
'margin_rate_or_amount',
'rate_with_margin',
'discount_percentage',
'discount_amount',
'base_rate_with_margin',
'rate',
'net_rate',
'amount',
'item_tax_template',
'net_amount',
'base_rate',
'base_net_rate',
'base_amount',
'base_net_amount',
'billed_amt',
'valuation_rate',
'gross_profit',
'warehouse',
'prevdoc_docname',
'projected_qty',
'actual_qty',
'ordered_qty',
'planned_qty',
'work_order_qty',
'delivered_qty',
'produced_qty',
'returned_qty',
'additional_notes',
])
child_data_2 = frappe.db.get_list('Sales Taxes and Charges', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'charge_type',
'row_id',
'account_head',
'description',
'cost_center',
'rate',
'account_currency',
'tax_amount',
'total',
'tax_amount_after_discount_amount',
'base_tax_amount',
'base_total',
'base_tax_amount_after_discount_amount',
])
child_data_3 = frappe.db.get_list('Payment Schedule', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'payment_term',
'description',
'due_date',
'mode_of_payment',
'invoice_portion',
'discount_type',
'discount_date',
'discount',
'payment_amount',
'outstanding',
'paid_amount',
'discounted_amount',
'base_payment_amount',
])
if child_data_1 and doc_data:
so['items'] = child_data_1
if child_data_2 and doc_data:
so['taxes'] = child_data_2
if child_data_3 and doc_data:
so['payment_schedule'] = child_data_3
    attachments = frappe.db.sql(""" Select file_name, file_url,
                                Date_Format(creation,'%%d/%%m/%%Y') as date_added
                                from `tabFile` where `tabFile`.attached_to_doctype = "Sales Order"
                                and `tabFile`.attached_to_name = %(name)s
                                """, {'name': name}, as_dict=1)
so['attachments'] = attachments
    comments = frappe.db.sql(""" Select creation, (Select `tabUser`.full_name from `tabUser` where `tabUser`.name = `tabComment`.owner) as owner, content
                             from `tabComment` where `tabComment`.reference_doctype = "Sales Order"
                             and `tabComment`.reference_name = %(name)s
                             and `tabComment`.comment_type = "Comment"
                             """, {'name': name}, as_dict=1)
so['comments'] = comments
print_formats = frappe.db.sql(
""" Select name from `tabPrint Format` where doc_type = "Sales Order" and disabled = 0 """, as_dict=1)
so['print_formats'] = print_formats
pf_standard = {}
pf_standard['name'] = "Standard"
print_formats.append(pf_standard)
sales_invoice = frappe.db.get_list('Sales Invoice Item', filters={'sales_order': name}, fields=['parent'], group_by='parent')
delivery_note = frappe.db.get_list('Delivery Note Item', filters={'against_sales_order': name}, fields=['parent'], group_by='parent')
material_request = frappe.db.get_list('Material Request Item', filters={'sales_order': name}, fields=['parent'], group_by='parent')
purchase_order = frappe.db.get_list('Purchase Order Item', filters={'sales_order': name}, fields=['parent'], group_by='parent')
quotation = frappe.db.get_list('Sales Order Item', filters={'parent': name, 'prevdoc_docname': ["!=", ""]}, fields=['prevdoc_docname'], group_by='prevdoc_docname')
payment_entry = frappe.db.get_list('Payment Entry Reference', filters={'reference_name': name}, fields=['parent'], group_by='parent')
sales_invoice_count = len(sales_invoice)
delivery_note_count = len(delivery_note)
material_request_count = len(material_request)
purchase_order_count = len(purchase_order)
quotation_count = len(quotation)
payment_entry_count = len(payment_entry)
sinv_connections = {}
dn_connections = {}
mr_connections = {}
po_connections = {}
qtn_connections = {}
pe_connections = {}
connections = []
if sales_invoice_count > 0 and doc_data:
sinv_connections['name'] = "Sales Invoice"
sinv_connections['count'] = sales_invoice_count
sinv_connections['icon'] = "https://erpcloud.systems/icons/sales_invoice.png"
connections.append(sinv_connections)
if delivery_note_count > 0 and doc_data:
dn_connections['name'] = "Delivery Note"
dn_connections['count'] = delivery_note_count
dn_connections['icon'] = "https://erpcloud.systems/icons/delivery_note.png"
connections.append(dn_connections)
if material_request_count > 0 and doc_data:
mr_connections['name'] = "Material Request"
mr_connections['count'] = material_request_count
mr_connections['icon'] = "https://erpcloud.systems/icons/material_request.png"
connections.append(mr_connections)
if purchase_order_count > 0 and doc_data:
po_connections['name'] = "Purchase Order"
po_connections['count'] = purchase_order_count
po_connections['icon'] = "https://erpcloud.systems/icons/purchase_order.png"
connections.append(po_connections)
if quotation_count > 0 and doc_data:
qtn_connections['name'] = "Quotation"
qtn_connections['count'] = quotation_count
        qtn_connections['qtn_no'] = quotation
qtn_connections['icon'] = "https://erpcloud.systems/icons/quotation.png"
connections.append(qtn_connections)
if payment_entry_count > 0 and doc_data:
pe_connections['name'] = "Payment Entry"
pe_connections['count'] = payment_entry_count
pe_connections['icon'] = "https://erpcloud.systems/icons/payment_entry.png"
connections.append(pe_connections)
so['conn'] = connections
if doc_data:
return so
else:
return "لا يوجد أمر بيع بهذا الاسم"
@frappe.whitelist()
def sales_invoice(name):
sinv = {}
doc_data = frappe.db.get_list('Sales Invoice', filters={'name': name},
fields=['name',
'customer',
'customer_name',
'posting_date',
'due_date',
'status',
'is_return',
'tax_id',
'customer_group',
'territory',
'customer_address',
'address_display',
'contact_display',
'contact_mobile',
'contact_email',
'project',
'cost_center',
'currency',
'conversion_rate',
'selling_price_list',
'price_list_currency',
'plc_conversion_rate',
'ignore_pricing_rule',
'set_warehouse',
'set_target_warehouse',
'update_stock',
'campaign',
'source',
'tc_name',
'terms',
'taxes_and_charges',
'payment_terms_template',
'sales_partner',
'commission_rate',
'total_commission',
'total_qty',
'base_total',
'base_net_total',
'total',
'net_total',
'base_total_taxes_and_charges',
'total_taxes_and_charges',
'apply_discount_on',
'base_discount_amount',
'additional_discount_percentage',
'discount_amount',
'base_grand_total',
'base_in_words',
'grand_total',
'in_words',
'docstatus'
])
for x in doc_data:
sinv['name'] = x.name
sinv['customer'] = x.customer
sinv['customer_name'] = x.customer_name
sinv['posting_date'] = x.posting_date
sinv['due_date'] = x.due_date
sinv['status'] = x.status
sinv['is_return'] = x.is_return
        sinv['tax_id'] = x.tax_id
sinv['customer_group'] = x.customer_group
sinv['territory'] = x.territory
sinv['customer_address'] = x.customer_address
sinv['address_display'] = x.address_display
sinv['contact_display'] = x.contact_display
sinv['contact_mobile'] = x.contact_mobile
sinv['contact_email'] = x.contact_email
sinv['project'] = x.project
sinv['cost_center'] = x.cost_center
sinv['currency'] = x.currency
sinv['conversion_rate'] = x.conversion_rate
sinv['selling_price_list'] = x.selling_price_list
sinv['price_list_currency'] = x.price_list_currency
sinv['plc_conversion_rate'] = x.plc_conversion_rate
sinv['update_stock'] = x.update_stock
sinv['set_warehouse'] = x.set_warehouse
sinv['set_target_warehouse'] = x.set_target_warehouse
sinv['tc_name'] = x.tc_name
sinv['terms'] = x.terms
sinv['taxes_and_charges'] = x.taxes_and_charges
sinv['payment_terms_template'] = x.payment_terms_template
sinv['sales_partner'] = x.sales_partner
sinv['commission_rate'] = x.commission_rate
sinv['total_commission'] = x.total_commission
sinv['total_qty'] = x.total_qty
sinv['base_total'] = x.base_total
sinv['base_net_total'] = x.base_net_total
sinv['total'] = x.total
sinv['net_total'] = x.net_total
sinv['base_total_taxes_and_charges'] = x.base_total_taxes_and_charges
sinv['total_taxes_and_charges'] = x.total_taxes_and_charges
sinv['apply_discount_on'] = x.apply_discount_on
sinv['base_discount_amount'] = x.base_discount_amount
sinv['additional_discount_percentage'] = x.additional_discount_percentage
sinv['discount_amount'] = x.discount_amount
sinv['base_grand_total'] = x.base_grand_total
sinv['base_in_words'] = x.base_in_words
sinv['grand_total'] = x.grand_total
sinv['in_words'] = x.in_words
sinv['docstatus'] = x.docstatus
child_data_1 = frappe.db.get_list('Sales Invoice Item', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'item_code',
'item_name',
'description',
'item_group',
'brand',
'image',
'qty',
'stock_uom',
'uom',
'conversion_factor',
'stock_qty',
'price_list_rate',
'base_price_list_rate',
'margin_type',
'margin_rate_or_amount',
'rate_with_margin',
'discount_percentage',
'discount_amount',
'base_rate_with_margin',
'rate',
'net_rate',
'amount',
'item_tax_template',
'net_amount',
'base_rate',
'base_net_rate',
'base_amount',
'base_net_amount',
'warehouse',
'actual_qty',
'delivered_qty',
])
child_data_2 = frappe.db.get_list('Sales Taxes and Charges', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'charge_type',
'row_id',
'account_head',
'description',
'cost_center',
'rate',
'account_currency',
'tax_amount',
'total',
'tax_amount_after_discount_amount',
'base_tax_amount',
'base_total',
'base_tax_amount_after_discount_amount',
])
child_data_3 = frappe.db.get_list('Payment Schedule', filters={'parent': name}, order_by='idx',
fields=[
'idx',
'name',
'payment_term',
'description',
'due_date',
'mode_of_payment',
'invoice_portion',
'discount_type',
'discount_date',
'discount',
'payment_amount',
'outstanding',
'paid_amount',
'discounted_amount',
'base_payment_amount',
],
)
if child_data_1 and doc_data:
sinv['items'] = child_data_1
if child_data_2 and doc_data:
sinv['taxes'] = child_data_2
if child_data_3 and doc_data:
sinv['payment_schedule'] = child_data_3
    attachments = frappe.db.sql(""" Select file_name, file_url,
                                Date_Format(creation,'%%d/%%m/%%Y') as date_added
                                from `tabFile` where `tabFile`.attached_to_doctype = "Sales Invoice"
                                and `tabFile`.attached_to_name = %(name)s
                                """, {'name': name}, as_dict=1)
sinv['attachments'] = attachments
    comments = frappe.db.sql(""" Select creation, (Select `tabUser`.full_name from `tabUser` where `tabUser`.name = `tabComment`.owner) as owner, content
                             from `tabComment` where `tabComment`.reference_doctype = "Sales Invoice"
                             and `tabComment`.reference_name = %(name)s
                             and `tabComment`.comment_type = "Comment"
                             """, {'name': name}, as_dict=1)
sinv['comments'] = comments
print_formats = frappe.db.sql(""" Select name from `tabPrint Format` where doc_type = "Sales Invoice" and disabled = 0 """, as_dict=1)
sinv['print_formats'] = print_formats
pf_standard = {}
pf_standard['name'] = "Standard"
print_formats.append(pf_standard)
sales_order = frappe.db.get_list('Sales Invoice Item', filters={'parent': name}, fields=['sales_order'], group_by='sales_order')
delivery_note = frappe.db.get_list('Delivery Note Item', filters={'against_sales_invoice': name}, fields=['parent'], group_by='parent')
payment_entry = frappe.db.get_list('Payment Entry Reference', filters={'reference_name': name}, fields=['parent'], group_by='parent')
sales_order_count = len(sales_order)
delivery_note_count = len(delivery_note)
payment_entry_count = len(payment_entry)
so_connections = {}
dn_connections = {}
pe_connections = {}
connections = []
    if sales_order_count > 0 and doc_data:
        so_connections['name'] = "Sales Order"
        so_connections['count'] = sales_order_count
        so_connections['icon'] = "https://erpcloud.systems/icons/sales_order.png"
        connections.append(so_connections)
    if delivery_note_count > 0 and doc_data:
        dn_connections['name'] = "Delivery Note"
        dn_connections['count'] = delivery_note_count
        dn_connections['icon'] = "https://erpcloud.systems/icons/delivery_note.png"
        connections.append(dn_connections)
    if payment_entry_count > 0 and doc_data:
        pe_connections['name'] = "Payment Entry"
        pe_connections['count'] = payment_entry_count
        pe_connections['icon'] = "https://erpcloud.systems/icons/payment_entry.png"
        connections.append(pe_connections)
    sinv['conn'] = connections
    if doc_data:
        return sinv
    else:
        return "لا توجد فاتورة مبيعات بهذا الاسم"  # Arabic: "There is no sales invoice with this name"
else:
# Left edge but neither of the two left corners
#print("Left edge")
lo = Point(2)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
to_add.append(Rect(lo,rect.hi))
hi = Point(2)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
to_add.append(Rect(rect.lo,hi))
# Check to see if we have an overlap with left edge
if other.hi.vals[0] < rect.hi.vals[0]:
# No overlap with right edge
lo = Point(2)
hi = Point(2)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = other.lo.vals[1]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.hi.vals[1]
to_add.append(Rect(lo,hi))
elif other.hi.vals[0] >= rect.hi.vals[0]:
if other.lo.vals[1] <= rect.lo.vals[1]:
#print("Lower right")
# Lower-right corner (can't overlap with any left corners)
hi = Point(2)
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(rect.lo,hi))
if other.hi.vals[1] < rect.hi.vals[1]:
# No overlap with top-right corner
lo = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
to_add.append(Rect(lo,rect.hi))
elif other.hi.vals[1] >= rect.hi.vals[1]:
#print("Upper right")
# Upper-right corner (can't overlap with any other corners)
hi = Point(2)
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(rect.lo,hi))
lo = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
hi = Point(2)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
to_add.append(Rect(lo,hi))
else:
#print("Right edge")
# Right edge (no overlap with left edge)
hi = Point(2)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
to_add.append(Rect(rect.lo,hi))
lo = Point(2)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
to_add.append(Rect(lo,rect.hi))
lo = Point(2)
hi = Point(2)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.lo.vals[1]
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = other.hi.vals[1]
to_add.append(Rect(lo,hi))
else:
if other.lo.vals[1] <= rect.lo.vals[1]:
#print("Bottom edge")
# Bottom edge
hi = Point(2)
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(rect.lo,hi))
lo = Point(2)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
to_add.append(Rect(lo,rect.hi))
# See if it intersects with top edge
if other.hi.vals[1] < rect.hi.vals[1]:
# Doesn't intersect
lo = Point(2)
hi = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(lo,hi))
elif other.hi.vals[1] >= rect.hi.vals[1]:
#print("Top edge")
# Top edge (can't overlap with bottom edge)
hi = Point(2)
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(rect.lo,hi))
lo = Point(2)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
to_add.append(Rect(lo,rect.hi))
lo = Point(2)
hi = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
to_add.append(Rect(lo,hi))
else:
#print("Center")
# Center (with no overlaps)
hi = Point(2)
hi.vals[0] = other.lo.vals[0]-1
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(rect.lo,hi))
lo = Point(2)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
to_add.append(Rect(lo,rect.hi))
lo = Point(2)
hi = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
to_add.append(Rect(lo,hi))
lo = Point(2)
hi = Point(2)
lo.vals[0] = other.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
to_add.append(Rect(lo,hi))
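                # At this point every 2-D position of `other` relative to `rect`
                # (4 corners, 4 edges, center -- 9 cases) has been reduced to at
                # most four disjoint rectangles covering rect minus other; the
                # 3-D branch below repeats the same decomposition face by face.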
elif rect.lo.dim == 3:
# 27 cases: 6 faces, 8 corners, 12 edges, 1 center
if other.lo.vals[0] <= rect.lo.vals[0]:
if other.lo.vals[1] <= rect.lo.vals[1]:
if other.lo.vals[2] <= rect.lo.vals[2]:
# Front lower-left corner
#print("Front lower left corner")
if other.hi.vals[0] >= rect.hi.vals[0]:
# Front lower-right corner
if other.hi.vals[1] >= rect.hi.vals[1]:
# Overlaps with front face
if other.hi.vals[2] >= rect.hi.vals[2]:
assert False # overlap all should never happen
else:
# Overlaps with just front face
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
to_add.append(Rect(lo,rect.hi))
else:
# Overlaps with front lower corners
if other.hi.vals[2] >= rect.hi.vals[2]:
# Overlaps with all lower corners
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
else:
# Only overlaps with front lower corners
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = rect.lo.vals[2]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
# No overlap with front right corner
if other.hi.vals[1] >= rect.hi.vals[1]:
# Overlaps with front left corners
if other.hi.vals[2] >= rect.hi.vals[2]:
# Overlaps with all left corners
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
else:
# Only overlaps with front left corners
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
# Overlaps with front lower-left
if other.hi.vals[2] >= rect.hi.vals[2]:
# Overlaps front and back lower-left corners
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = rect.lo.vals[2]
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
# Overlaps with just front lower-left
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = other.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = rect.lo.vals[2]
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
elif other.hi.vals[2] >= rect.hi.vals[2]:
#print("Back lower left corner")
# Back lower-left corner (no overlap with any front corners)
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.lo.vals[2]-1
to_add.append(Rect(rect.lo,hi))
if other.hi.vals[0] >= rect.hi.vals[0]:
# Overlap with back lower-right
if other.hi.vals[1] >= rect.hi.vals[1]:
# Overlap with back face so we're done
pass
else:
# Overlap with back lower corners
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = other.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
else:
# No overlaps with back right corners
if other.hi.vals[1] >= rect.hi.vals[1]:
# Overlap with back upper-left
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
else:
# Just back lower-left corner
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = other.lo.vals[2]
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
#print("Lower left z edge")
# Lower-left z edge
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.lo.vals[2]-1
to_add.append(Rect(rect.lo,hi))
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
to_add.append(Rect(lo,rect.hi))
if other.hi.vals[0] >= rect.hi.vals[0]:
if other.hi.vals[1] >= rect.hi.vals[1]:
# cutting plane through z so we're done
pass
else:
# only bottom half cutting plane
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = other.lo.vals[2]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
if other.hi.vals[1] >= rect.hi.vals[1]:
# only left half cutting plane
lo = Point(3)
hi = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.lo.vals[2]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.hi.vals[2]
to_add.append(Rect(lo,hi))
else:
# only lower-left z edge
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.hi.vals[1]+1
lo.vals[2] = other.lo.vals[2]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = other.hi.vals[2]
to_add.append(Rect(lo,hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = rect.lo.vals[1]
lo.vals[2] = other.lo.vals[2]
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.hi.vals[1]
hi.vals[2] = other.hi.vals[2]
to_add.append(Rect(lo,hi))
elif other.hi.vals[1] >= rect.hi.vals[1]:
if other.lo.vals[2] <= rect.lo.vals[2]:
#print("Front upper left corner")
# Front upper-left corner
# No overlap with any lower corners
if other.hi.vals[0] >= rect.hi.vals[0]:
if other.hi.vals[2] >= rect.hi.vals[2]:
# Overlap with whole top
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(rect.lo,hi))
else:
# Overlap with front upper corners
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(rect.lo,hi))
lo = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
to_add.append(Rect(lo,rect.hi))
else:
if other.hi.vals[2] >= rect.hi.vals[2]:
# Overlap with left upper corners
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(rect.lo,hi))
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = other.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
else:
# Overlap with just front upper-left
hi = Point(3)
hi.vals[0] = rect.hi.vals[0]
hi.vals[1] = other.lo.vals[1]-1
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(rect.lo,hi))
lo = Point(3)
lo.vals[0] = other.hi.vals[0]+1
lo.vals[1] = other.lo.vals[1]
lo.vals[2] = rect.lo.vals[2]
to_add.append(Rect(lo,rect.hi))
lo = Point(3)
hi = Point(3)
lo.vals[0] = rect.lo.vals[0]
lo.vals[1] = other.lo.vals[1]
lo.vals[2] = other.hi.vals[2]+1
hi.vals[0] = other.hi.vals[0]
hi.vals[1] = rect.hi.vals[1]
hi.vals[2] = rect.hi.vals[2]
to_add.append(Rect(lo,hi))
elif other.hi.vals[2] >= rect.hi.vals[2]:
self.registerNatlinkPyd(silent=1)
return result # None if something went wrong 1 if all OK
def removeNatlinkPyd(self):
"""remove the natlink.pyd file (Dragon should be switched off)
in order to redo the copyNatlinkPydPythonVersion again
"""
if not self.isElevated: raise ElevationError("needed for removing your previous natlink.pyd. Also close Dragon.")
# if self.isNatSpeakRunning(): raise NatSpeakRunningError("needed for removing your previous natlink.pyd")
coreDir = self.getCoreDirectory()
currentPydFile = os.path.join(coreDir, 'natlink.pyd')
if os.path.isfile(currentPydFile):
try:
os.remove(currentPydFile)
except (WindowsError, IOError):
fatal_error('cannot remove natlink.pyd from the core directory: %s\nProbably Dragon is running'% coreDir)
return
if os.path.isfile(currentPydFile):
            fatal_error('strange, could not remove "natlink.pyd" from the core directory: "%s"\nPossibly Dragon is running'% coreDir)
return
# ok:
return 1 #
def copyNatlinkPydPythonVersion(self, wantedPydFile, currentPydFile):
"""copy the natlink.pyd from the correct version"""
if not self.isElevated: raise ElevationError("needed for copying the correct natlink.pyd file.")
        # if self.isNatSpeakRunning(): raise NatSpeakRunningError("needed for copying the correct natlink.pyd file")
if os.path.isfile(currentPydFile):
self.unregisterNatlinkPyd()
try:
os.remove(currentPydFile)
except WindowsError:
fatal_error('cannot remove currentPydFile "%s",\nProbably you must exit Dragon first\nPossibly restart your computer.'% currentPydFile)
return
if os.path.isfile(wantedPydFile):
try:
shutil.copyfile(wantedPydFile, currentPydFile)
print 'copied pyd (=dll) file %s to %s'% (wantedPydFile, currentPydFile)
except:
fatal_error("Could not copy %s to %s\nProbably you need to exit Dragon first."% (wantedPydFile, currentPydFile))
return
else:
fatal_error("wantedPydFile %s is missing! Cannot copy to natlink.pyd/natlink.pyd"% wantedPydFile)
return
return 1
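    # Note the ordering the two methods above rely on: natlink.pyd is first
    # unregistered and removed (Dragon must be closed), then the pyd matching
    # the installed Python version is copied in and re-registered via
    # registerNatlinkPyd().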
    def getHKLMPythonPathDict(self, flags=win32con.KEY_ALL_ACCESS, recursive=False):
"""returns the dict that contains the PythonPath section of HKLM
Overload for config program, automatically set or repair the pythonpath variable if the format is not ok
"""
version = self.getPythonVersion()
if not version:
fatal_error("no valid Python version available")
return None, None
dottedVersion = version[0] + "." + version[1]
pythonPathSectionName = r"SOFTWARE\Python\PythonCore\%s\PythonPath"% dottedVersion
# key MUST already exist (ensure by passing flags=...:
#try:
lmPythonPathDict = RegistryDict.RegistryDict(win32con.HKEY_LOCAL_MACHINE, pythonPathSectionName, flags=flags)
#except:
# fatal_error("registry section for pythonpath does not exist yet: %s, probably invalid Python version: %s"%
# (pythonPathSectionName, version))
# return None, None
if 'NatLink' in lmPythonPathDict.keys():
subDict = lmPythonPathDict['NatLink']
if isinstance(subDict, RegistryDict.RegistryDict):
if '' in subDict.keys():
value = subDict['']
if value and type(value) in (str, unicode):
# all well (only the value is not tested yet):
return lmPythonPathDict, pythonPathSectionName
# not ok, repair the setting, admin rights needed:
if recursive:
fatal_error("Registry entry NatLink in pythonpath cannot be set correct, This can (hopefully) be solved by closing Dragon and then running the NatLink/Unimacro/Vocola Config program with administrator rights.run this program")
return None, None
print '==== Set NatLink setting in PythonPath section of registry to "%s"'% coreDir
lmPythonPathDict['NatLink'] = {'': coreDir}
return self.getHKLMPythonPathDict(recursive=True)
def checkPythonPathAndRegistry(self):
"""checks if core directory is
1. in the sys.path
### 2. in the registry keys of HKLM\SOFTWARE\Python\PythonCore\2.7\PythonPath\NatLink
the latter part is inserted again, as, for some reason the automatic loading of
natlinkmain needs the core directory in its path. Only take the core dir now!!
Instead the status.checkSysPath() function checks the existence of the core, base and user
directories in the sys.path and sets then if necessary.
If this last key is not there or empty
---set paths of coreDirectory
---register natlink.pyd
It is probably the first time to run this program.
If the settings are conflicting, either
---you want to reconfigure NatLink in a new place (these directories)
---you ran this program from a wrong place, exit and start again from the correct directory
"""
self.checkedUrgent = None
if __name__ == '__main__':
print "checking PythonPathAndRegistry"
try:
            result = self.getHKLMPythonPathDict(flags=win32con.KEY_ALL_ACCESS)
            if result is None or result[0] is None:
                # getHKLMPythonPathDict already reported a fatal error
                return None
            lmPythonPathDict, PythonPathSectionName = result
except (pywintypes.error, KeyError):
mess = 'The section "NatLink" does not exist and cannot be created in the registry. You probably should run this program with administrator rights'
self.warning(mess)
self.checkedUrgent = 1
if not self.isElevated: raise ElevationError("needed for fixing the PythonPath in the registry settings.")
coreDir2 = self.getCoreDirectory()
if coreDir2.lower() != coreDir.lower():
            fatal_error('ambiguous core directory,\nfrom this module: %s\nfrom status in natlinkstatus: %s'%
                          (coreDir, coreDir2))
# adding the relevant directories to the sys.path variable:
#self.checkSysPath() ## not needed in config program
pathString = coreDir
## if lmPythonPath:
## print 'lmPythonPath: ', lmPythonPath.keys()
result = lmPythonPathDict['NatLink']
if result and '' in result:
coreDirFromRegistry = lmPythonPathDict['NatLink']['']
if coreDirFromRegistry.lower() != coreDir.lower():
self.doFatalRegistryProblem(coreDirFromRegistry, coreDir)
return
else:
if not self.isElevated: raise ElevationError("needed for making changes in the PythonPath registry settings and register natlink.pyd.")
# first time install, silently register
self.registerNatlinkPyd(silent=1)
self.setNatlinkInPythonPathRegistry()
return 1
lmNatlinkPathDict = lmPythonPathDict['NatLink']
Keys = lmNatlinkPathDict.keys()
if not Keys:
# first time install Section is there, but apparently empty
if not self.isElevated: raise ElevationError("needed for making changes in the PythonPath registry settings and register natlink.pyd.")
self.registerNatlinkPyd(silent=1)
self.setNatlinkInPythonPathRegistry()
return 1
if Keys != [""]:
if not self.isElevated: raise ElevationError("needed for making changes in the PythonPath registry settings.")
if '' in Keys:
Keys.remove("")
fatal_error("The registry section of the pythonPathSection of HKEY_LOCAL_MACHINE:\n\tHKLM\%s\ncontains invalid keys: %s, remove them with the registry editor (regedit)\nAnd rerun this program"%
(PythonPathSectionName+r'\NatLink', Keys))
# now section has default "" key, proceed:
oldPathString = lmNatlinkPathDict[""]
if oldPathString.find(';') > 0:
print 'remove double entry, go back to single entry'
self.setNatlinkInPythonPathRegistry()
oldPathString = lmNatlinkPathDict[""]
if oldPathString.find(';') > 0:
fatal_error("did not fix double entry in registry setting of the pythonPathSection of HKEY_LOCAL_MACHINE:\n\tHKLM\%s\ncontains more entries separated by ';'. Remove with the registry editor (regedit)\nAnd rerun this program"%PythonPathSectionName+r'\NatLink')
if not oldPathString:
# empty setting, silently register
if not self.isElevated: raise ElevationError("needed for making changes in the PythonPath registry settings and register natlink.pyd.")
self.registerNatlinkPyd(silent=1)
self.setNatlinkInPythonPathRegistry()
return 1
if oldPathString.lower() == pathString.lower():
return 1 # OK
## not ok:
self.doFatalRegistryProblem(oldPathString, pathString)
def doFatalRegistryProblem(self, CoreDirFromRegistry, currentCoreDir):
"""registry does not match, make text and report
"""
# now for something more serious:::
text = \
"""
The PythonPath for NatLink does not match in registry with what this program
expects
---settings in Registry: %s
---wanted settings: %s
You probably just installed NatLink in a new location
and you ran the config program for the first time.
If you want the new settings, (re)register natlink.pyd (r)
And rerun this program...
Close %s (including Quick Start Mode), and all other Python applications
before rerunning this program. Possibly you have to restart your computer.
If you do NOT want these new settings, simply close this program and run
from the correct place.
"""% (CoreDirFromRegistry, currentCoreDir, self.DNSName)
self.warning(text)
self.checkedUrgent = 1
def checkIniFiles(self):
"""check if INI files are consistent
this is done through the
"""
if self.DNSInstallDir == -1:
return
if self.DNSIniDir == -1:
return
result = self.NatlinkIsEnabled(silent=1)
        if result is None:
if not self.isElevated: raise ElevationError("needed for fixing the natlink enabled state")
# if self.isNatSpeakRunning(): raise NatSpeakRunningError("needed for fixing the natlink enabled state")
self.disableNatlink(silent=1)
result = self.NatlinkIsEnabled(silent=1)
            if result is None:
text = \
"""NatLink INI file settings are inconsistent,
and cannot automatically be disabled.
Try to disable again, acquire administrator rights or report this issue
"""
self.warning(text)
return None
else:
text = \
"""NatLink INI file settings were inconsistent;
This has been repaired.
NatLink is now disabled.
"""
self.warning(text)
return 1
def warning(self,text):
"""is currently overloaded in GUI"""
if type(text) in (types.StringType, types.UnicodeType):
T = text
else:
# list probably:
T = '\n'.join(text)
print '-'*60
print T
print '='*60
return T
def error(self,text):
"""is currently overloaded in GUI"""
if type(text) in (types.StringType, types.UnicodeType):
T = text
else:
# list probably:
T = '\n'.join(text)
print '-'*60
print T
print '='*60
return T
def message(self, text):
"""prints message, can be overloaded in configureGUI
"""
if type(text) in (types.StringType, types.UnicodeType):
T = text
else:
# list probably:
T = '\n'.join(text)
print '-'*60
print T
print '='*60
def setstatus(self, text):
"""prints status, should be overloaded in configureGUI
"""
if type(text) in (types.StringType, types.UnicodeType):
T = text
else:
# list probably:
T = '\n'.join(text)
print '-'*60
print T
print '='*60
def isValidPath(self, Path, wantDirectory=None, wantFile=None):
"""return the path, if valid
otherwise return ""
same as function in natlinkstatus
"""
return natlinkstatus.isValidPath(Path, wantDirectory=wantDirectory, wantFile=wantFile)
def setNatlinkInPythonPathRegistry(self):
"""sets the HKLM setting of the Python registry
do this only when needed...
"""
lmPythonPathDict, pythonPathSectionName = self.getHKLMPythonPathDict(flags=win32con.KEY_ALL_ACCESS)
pathString = os.path.normpath(os.path.abspath(coreDir))
NatlinkSection = lmPythonPathDict.get('NatLink', | |
from z3 import *
from consts import METRICS_MAXIMIZE, METRICS_MINIMIZE
FeatureIndexMap = {}
FeatureVariable = []
FeatureIndexMap['eShop'] = 0
eShop = Bool('eShop')
FeatureVariable.append(eShop)
FeatureIndexMap['eShop'] = 1
eShop = Bool('eShop')
FeatureVariable.append(eShop)
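# Note: the generator registers the root feature 'eShop' twice, so
# FeatureVariable[0] and FeatureVariable[1] hold the same z3 Bool and
# FeatureIndexMap['eShop'] ends up as 1; all later features stay aligned with
# their FeatureVariable positions precisely because of this double append.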
FeatureIndexMap['store_front'] = 2
store_front = Bool('store_front')
FeatureVariable.append(store_front)
FeatureIndexMap['homepage'] = 3
homepage = Bool('homepage')
FeatureVariable.append(homepage)
FeatureIndexMap['_id_1'] = 4
_id_1 = Bool('_id_1')
FeatureVariable.append(_id_1)
FeatureIndexMap['_id_2'] = 5
_id_2 = Bool('_id_2')
FeatureVariable.append(_id_2)
FeatureIndexMap['_id_3'] = 6
_id_3 = Bool('_id_3')
FeatureVariable.append(_id_3)
FeatureIndexMap['_id_5'] = 7
_id_5 = Bool('_id_5')
FeatureVariable.append(_id_5)
FeatureIndexMap['special_offers'] = 8
special_offers = Bool('special_offers')
FeatureVariable.append(special_offers)
FeatureIndexMap['_id_6'] = 9
_id_6 = Bool('_id_6')
FeatureVariable.append(_id_6)
FeatureIndexMap['_id_8'] = 10
_id_8 = Bool('_id_8')
FeatureVariable.append(_id_8)
FeatureIndexMap['_id_9'] = 11
_id_9 = Bool('_id_9')
FeatureVariable.append(_id_9)
FeatureIndexMap['registration'] = 12
registration = Bool('registration')
FeatureVariable.append(registration)
FeatureIndexMap['registration_enforcement'] = 13
registration_enforcement = Bool('registration_enforcement')
FeatureVariable.append(registration_enforcement)
FeatureIndexMap['_id_11'] = 14
_id_11 = Bool('_id_11')
FeatureVariable.append(_id_11)
FeatureIndexMap['register_to_buy'] = 15
register_to_buy = Bool('register_to_buy')
FeatureVariable.append(register_to_buy)
FeatureIndexMap['_id_12'] = 16
_id_12 = Bool('_id_12')
FeatureVariable.append(_id_12)
FeatureIndexMap['_id_13'] = 17
_id_13 = Bool('_id_13')
FeatureVariable.append(_id_13)
FeatureIndexMap['_id_14'] = 18
_id_14 = Bool('_id_14')
FeatureVariable.append(_id_14)
FeatureIndexMap['shipping_address'] = 19
shipping_address = Bool('shipping_address')
FeatureVariable.append(shipping_address)
FeatureIndexMap['_id_15'] = 20
_id_15 = Bool('_id_15')
FeatureVariable.append(_id_15)
FeatureIndexMap['_id_16'] = 21
_id_16 = Bool('_id_16')
FeatureVariable.append(_id_16)
FeatureIndexMap['_id_17'] = 22
_id_17 = Bool('_id_17')
FeatureVariable.append(_id_17)
FeatureIndexMap['_id_18'] = 23
_id_18 = Bool('_id_18')
FeatureVariable.append(_id_18)
FeatureIndexMap['_id_19'] = 24
_id_19 = Bool('_id_19')
FeatureVariable.append(_id_19)
FeatureIndexMap['_id_20'] = 25
_id_20 = Bool('_id_20')
FeatureVariable.append(_id_20)
FeatureIndexMap['_id_21'] = 26
_id_21 = Bool('_id_21')
FeatureVariable.append(_id_21)
FeatureIndexMap['_id_22'] = 27
_id_22 = Bool('_id_22')
FeatureVariable.append(_id_22)
FeatureIndexMap['_id_23'] = 28
_id_23 = Bool('_id_23')
FeatureVariable.append(_id_23)
FeatureIndexMap['_id_25'] = 29
_id_25 = Bool('_id_25')
FeatureVariable.append(_id_25)
FeatureIndexMap['_id_26'] = 30
_id_26 = Bool('_id_26')
FeatureVariable.append(_id_26)
FeatureIndexMap['_id_27'] = 31
_id_27 = Bool('_id_27')
FeatureVariable.append(_id_27)
FeatureIndexMap['_id_28'] = 32
_id_28 = Bool('_id_28')
FeatureVariable.append(_id_28)
FeatureIndexMap['_id_29'] = 33
_id_29 = Bool('_id_29')
FeatureVariable.append(_id_29)
FeatureIndexMap['preferences'] = 34
preferences = Bool('preferences')
FeatureVariable.append(preferences)
FeatureIndexMap['_id_31'] = 35
_id_31 = Bool('_id_31')
FeatureVariable.append(_id_31)
FeatureIndexMap['_id_32'] = 36
_id_32 = Bool('_id_32')
FeatureVariable.append(_id_32)
FeatureIndexMap['_id_33'] = 37
_id_33 = Bool('_id_33')
FeatureVariable.append(_id_33)
FeatureIndexMap['_id_34'] = 38
_id_34 = Bool('_id_34')
FeatureVariable.append(_id_34)
FeatureIndexMap['quick_checkout_profile'] = 39
quick_checkout_profile = Bool('quick_checkout_profile')
FeatureVariable.append(quick_checkout_profile)
FeatureIndexMap['_id_35'] = 40
_id_35 = Bool('_id_35')
FeatureVariable.append(_id_35)
FeatureIndexMap['user_behaviour_tracking_info'] = 41
user_behaviour_tracking_info = Bool('user_behaviour_tracking_info')
FeatureVariable.append(user_behaviour_tracking_info)
FeatureIndexMap['catalog'] = 42
catalog = Bool('catalog')
FeatureVariable.append(catalog)
FeatureIndexMap['product_information'] = 43
product_information = Bool('product_information')
FeatureVariable.append(product_information)
FeatureIndexMap['product_type'] = 44
product_type = Bool('product_type')
FeatureVariable.append(product_type)
FeatureIndexMap['eletronic_goods'] = 45
eletronic_goods = Bool('eletronic_goods')
FeatureVariable.append(eletronic_goods)
FeatureIndexMap['physical_goods'] = 46
physical_goods = Bool('physical_goods')
FeatureVariable.append(physical_goods)
FeatureIndexMap['services'] = 47
services = Bool('services')
FeatureVariable.append(services)
FeatureIndexMap['basic_information'] = 48
basic_information = Bool('basic_information')
FeatureVariable.append(basic_information)
FeatureIndexMap['detailed_information'] = 49
detailed_information = Bool('detailed_information')
FeatureVariable.append(detailed_information)
FeatureIndexMap['warranty_information'] = 50
warranty_information = Bool('warranty_information')
FeatureVariable.append(warranty_information)
FeatureIndexMap['customer_reviews'] = 51
customer_reviews = Bool('customer_reviews')
FeatureVariable.append(customer_reviews)
FeatureIndexMap['associated_assets'] = 52
associated_assets = Bool('associated_assets')
FeatureVariable.append(associated_assets)
FeatureIndexMap['_id_38'] = 53
_id_38 = Bool('_id_38')
FeatureVariable.append(_id_38)
FeatureIndexMap['_id_39'] = 54
_id_39 = Bool('_id_39')
FeatureVariable.append(_id_39)
FeatureIndexMap['_id_41'] = 55
_id_41 = Bool('_id_41')
FeatureVariable.append(_id_41)
FeatureIndexMap['_id_43'] = 56
_id_43 = Bool('_id_43')
FeatureVariable.append(_id_43)
FeatureIndexMap['_id_44'] = 57
_id_44 = Bool('_id_44')
FeatureVariable.append(_id_44)
FeatureIndexMap['_id_45'] = 58
_id_45 = Bool('_id_45')
FeatureVariable.append(_id_45)
FeatureIndexMap['_id_46'] = 59
_id_46 = Bool('_id_46')
FeatureVariable.append(_id_46)
FeatureIndexMap['_id_47'] = 60
_id_47 = Bool('_id_47')
FeatureVariable.append(_id_47)
FeatureIndexMap['_id_48'] = 61
_id_48 = Bool('_id_48')
FeatureVariable.append(_id_48)
FeatureIndexMap['_id_49'] = 62
_id_49 = Bool('_id_49')
FeatureVariable.append(_id_49)
FeatureIndexMap['_id_50'] = 63
_id_50 = Bool('_id_50')
FeatureVariable.append(_id_50)
FeatureIndexMap['product_variants'] = 64
product_variants = Bool('product_variants')
FeatureVariable.append(product_variants)
FeatureIndexMap['_id_51'] = 65
_id_51 = Bool('_id_51')
FeatureVariable.append(_id_51)
FeatureIndexMap['size'] = 66
size = Bool('size')
FeatureVariable.append(size)
FeatureIndexMap['weight'] = 67
weight = Bool('weight')
FeatureVariable.append(weight)
FeatureIndexMap['availability'] = 68
availability = Bool('availability')
FeatureVariable.append(availability)
FeatureIndexMap['custom_fields'] = 69
custom_fields = Bool('custom_fields')
FeatureVariable.append(custom_fields)
FeatureIndexMap['categories'] = 70
categories = Bool('categories')
FeatureVariable.append(categories)
FeatureIndexMap['categories_catalog'] = 71
categories_catalog = Bool('categories_catalog')
FeatureVariable.append(categories_catalog)
FeatureIndexMap['_id_52'] = 72
_id_52 = Bool('_id_52')
FeatureVariable.append(_id_52)
FeatureIndexMap['_id_53'] = 73
_id_53 = Bool('_id_53')
FeatureVariable.append(_id_53)
FeatureIndexMap['_id_54'] = 74
_id_54 = Bool('_id_54')
FeatureVariable.append(_id_54)
FeatureIndexMap['_id_55'] = 75
_id_55 = Bool('_id_55')
FeatureVariable.append(_id_55)
FeatureIndexMap['_id_56'] = 76
_id_56 = Bool('_id_56')
FeatureVariable.append(_id_56)
FeatureIndexMap['_id_58'] = 77
_id_58 = Bool('_id_58')
FeatureVariable.append(_id_58)
FeatureIndexMap['_id_59'] = 78
_id_59 = Bool('_id_59')
FeatureVariable.append(_id_59)
FeatureIndexMap['_id_60'] = 79
_id_60 = Bool('_id_60')
FeatureVariable.append(_id_60)
FeatureIndexMap['_id_61'] = 80
_id_61 = Bool('_id_61')
FeatureVariable.append(_id_61)
FeatureIndexMap['category_page'] = 81
category_page = Bool('category_page')
FeatureVariable.append(category_page)
FeatureIndexMap['_id_62'] = 82
_id_62 = Bool('_id_62')
FeatureVariable.append(_id_62)
FeatureIndexMap['_id_63'] = 83
_id_63 = Bool('_id_63')
FeatureVariable.append(_id_63)
FeatureIndexMap['_id_65'] = 84
_id_65 = Bool('_id_65')
FeatureVariable.append(_id_65)
FeatureIndexMap['_id_66'] = 85
_id_66 = Bool('_id_66')
FeatureVariable.append(_id_66)
FeatureIndexMap['_id_67'] = 86
_id_67 = Bool('_id_67')
FeatureVariable.append(_id_67)
FeatureIndexMap['_id_68'] = 87
_id_68 = Bool('_id_68')
FeatureVariable.append(_id_68)
FeatureIndexMap['_id_69'] = 88
_id_69 = Bool('_id_69')
FeatureVariable.append(_id_69)
FeatureIndexMap['_id_70'] = 89
_id_70 = Bool('_id_70')
FeatureVariable.append(_id_70)
FeatureIndexMap['_id_71'] = 90
_id_71 = Bool('_id_71')
FeatureVariable.append(_id_71)
FeatureIndexMap['_id_72'] = 91
_id_72 = Bool('_id_72')
FeatureVariable.append(_id_72)
FeatureIndexMap['wish_list'] = 92
wish_list = Bool('wish_list')
FeatureVariable.append(wish_list)
FeatureIndexMap['wish_list_saved_after_session'] = 93
wish_list_saved_after_session = Bool('wish_list_saved_after_session')
FeatureVariable.append(wish_list_saved_after_session)
FeatureIndexMap['email_wish_list'] = 94
email_wish_list = Bool('email_wish_list')
FeatureVariable.append(email_wish_list)
FeatureIndexMap['_id_73'] = 95
_id_73 = Bool('_id_73')
FeatureVariable.append(_id_73)
FeatureIndexMap['permissions'] = 96
permissions = Bool('permissions')
FeatureVariable.append(permissions)
FeatureIndexMap['_id_75'] = 97
_id_75 = Bool('_id_75')
FeatureVariable.append(_id_75)
FeatureIndexMap['_id_76'] = 98
_id_76 = Bool('_id_76')
FeatureVariable.append(_id_76)
FeatureIndexMap['_id_77'] = 99
_id_77 = Bool('_id_77')
FeatureVariable.append(_id_77)
FeatureIndexMap['buy_paths'] = 100
buy_paths = Bool('buy_paths')
FeatureVariable.append(buy_paths)
FeatureIndexMap['_id_78'] = 101
_id_78 = Bool('_id_78')
FeatureVariable.append(_id_78)
FeatureIndexMap['_id_79'] = 102
_id_79 = Bool('_id_79')
FeatureVariable.append(_id_79)
FeatureIndexMap['_id_80'] = 103
_id_80 = Bool('_id_80')
FeatureVariable.append(_id_80)
FeatureIndexMap['_id_81'] = 104
_id_81 = Bool('_id_81')
FeatureVariable.append(_id_81)
FeatureIndexMap['_id_82'] = 105
_id_82 = Bool('_id_82')
FeatureVariable.append(_id_82)
FeatureIndexMap['_id_83'] = 106
_id_83 = Bool('_id_83')
FeatureVariable.append(_id_83)
FeatureIndexMap['_id_84'] = 107
_id_84 = Bool('_id_84')
FeatureVariable.append(_id_84)
FeatureIndexMap['registered_checkout'] = 108
registered_checkout = Bool('registered_checkout')
FeatureVariable.append(registered_checkout)
FeatureIndexMap['quick_checkout'] = 109
quick_checkout = Bool('quick_checkout')
FeatureVariable.append(quick_checkout)
FeatureIndexMap['_id_86'] = 110
_id_86 = Bool('_id_86')
FeatureVariable.append(_id_86)
FeatureIndexMap['_id_87'] = 111
_id_87 = Bool('_id_87')
FeatureVariable.append(_id_87)
FeatureIndexMap['shipping_options'] = 112
shipping_options = Bool('shipping_options')
FeatureVariable.append(shipping_options)
FeatureIndexMap['_id_88'] = 113
_id_88 = Bool('_id_88')
FeatureVariable.append(_id_88)
FeatureIndexMap['_id_89'] = 114
_id_89 = Bool('_id_89')
FeatureVariable.append(_id_89)
FeatureIndexMap['_id_90'] = 115
_id_90 = Bool('_id_90')
FeatureVariable.append(_id_90)
FeatureIndexMap['_id_91'] = 116
_id_91 = Bool('_id_91')
FeatureVariable.append(_id_91)
FeatureIndexMap['_id_92'] = 117
_id_92 = Bool('_id_92')
FeatureVariable.append(_id_92)
FeatureIndexMap['_id_93'] = 118
_id_93 = Bool('_id_93')
FeatureVariable.append(_id_93)
FeatureIndexMap['_id_95'] = 119
_id_95 = Bool('_id_95')
FeatureVariable.append(_id_95)
FeatureIndexMap['_id_96'] = 120
_id_96 = Bool('_id_96')
FeatureVariable.append(_id_96)
FeatureIndexMap['_id_98'] = 121
_id_98 = Bool('_id_98')
FeatureVariable.append(_id_98)
FeatureIndexMap['_id_99'] = 122
_id_99 = Bool('_id_99')
FeatureVariable.append(_id_99)
FeatureIndexMap['_id_100'] = 123
_id_100 = Bool('_id_100')
FeatureVariable.append(_id_100)
FeatureIndexMap['_id_101'] = 124
_id_101 = Bool('_id_101')
FeatureVariable.append(_id_101)
FeatureIndexMap['shipping_2'] = 125
shipping_2 = Bool('shipping_2')
FeatureVariable.append(shipping_2)
FeatureIndexMap['_id_102'] = 126
_id_102 = Bool('_id_102')
FeatureVariable.append(_id_102)
FeatureIndexMap['_id_103'] = 127
_id_103 = Bool('_id_103')
FeatureVariable.append(_id_103)
FeatureIndexMap['_id_105'] = 128
_id_105 = Bool('_id_105')
FeatureVariable.append(_id_105)
FeatureIndexMap['_id_106'] = 129
_id_106 = Bool('_id_106')
FeatureVariable.append(_id_106)
FeatureIndexMap['_id_107'] = 130
_id_107 = Bool('_id_107')
FeatureVariable.append(_id_107)
FeatureIndexMap['_id_108'] = 131
_id_108 = Bool('_id_108')
FeatureVariable.append(_id_108)
FeatureIndexMap['_id_110'] = 132
_id_110 = Bool('_id_110')
FeatureVariable.append(_id_110)
FeatureIndexMap['_id_111'] = 133
_id_111 = Bool('_id_111')
FeatureVariable.append(_id_111)
FeatureIndexMap['_id_112'] = 134
_id_112 = Bool('_id_112')
FeatureVariable.append(_id_112)
FeatureIndexMap['_id_114'] = 135
_id_114 = Bool('_id_114')
FeatureVariable.append(_id_114)
FeatureIndexMap['_id_115'] = 136
_id_115 = Bool('_id_115')
FeatureVariable.append(_id_115)
FeatureIndexMap['_id_116'] = 137
_id_116 = Bool('_id_116')
FeatureVariable.append(_id_116)
FeatureIndexMap['_id_117'] = 138
_id_117 = Bool('_id_117')
FeatureVariable.append(_id_117)
FeatureIndexMap['_id_118'] = 139
_id_118 = Bool('_id_118')
FeatureVariable.append(_id_118)
FeatureIndexMap['_id_120'] = 140
_id_120 = Bool('_id_120')
FeatureVariable.append(_id_120)
FeatureIndexMap['_id_121'] = 141
_id_121 = Bool('_id_121')
FeatureVariable.append(_id_121)
FeatureIndexMap['_id_122'] = 142
_id_122 = Bool('_id_122')
FeatureVariable.append(_id_122)
FeatureIndexMap['_id_123'] = 143
_id_123 = Bool('_id_123')
FeatureVariable.append(_id_123)
FeatureIndexMap['_id_124'] = 144
_id_124 = Bool('_id_124')
FeatureVariable.append(_id_124)
FeatureIndexMap['_id_125'] = 145
_id_125 = Bool('_id_125')
FeatureVariable.append(_id_125)
FeatureIndexMap['_id_126'] = 146
_id_126 = Bool('_id_126')
FeatureVariable.append(_id_126)
FeatureIndexMap['_id_127'] = 147
_id_127 = Bool('_id_127')
FeatureVariable.append(_id_127)
FeatureIndexMap['_id_128'] = 148
_id_128 = Bool('_id_128')
FeatureVariable.append(_id_128)
FeatureIndexMap['_id_129'] = 149
_id_129 = Bool('_id_129')
FeatureVariable.append(_id_129)
FeatureIndexMap['_id_130'] = 150
_id_130 = Bool('_id_130')
FeatureVariable.append(_id_130)
FeatureIndexMap['_id_132'] = 151
_id_132 = Bool('_id_132')
FeatureVariable.append(_id_132)
FeatureIndexMap['_id_133'] = 152
_id_133 = Bool('_id_133')
FeatureVariable.append(_id_133)
FeatureIndexMap['_id_134'] = 153
_id_134 = Bool('_id_134')
FeatureVariable.append(_id_134)
FeatureIndexMap['_id_135'] = 154
_id_135 = Bool('_id_135')
FeatureVariable.append(_id_135)
FeatureIndexMap['_id_136'] = 155
_id_136 = Bool('_id_136')
FeatureVariable.append(_id_136)
FeatureIndexMap['_id_137'] = 156
_id_137 = Bool('_id_137')
FeatureVariable.append(_id_137)
FeatureIndexMap['_id_138'] = 157
_id_138 = Bool('_id_138')
FeatureVariable.append(_id_138)
FeatureIndexMap['_id_139'] = 158
_id_139 = Bool('_id_139')
FeatureVariable.append(_id_139)
FeatureIndexMap['_id_141'] = 159
_id_141 = Bool('_id_141')
FeatureVariable.append(_id_141)
FeatureIndexMap['_id_142'] = 160
_id_142 = Bool('_id_142')
FeatureVariable.append(_id_142)
FeatureIndexMap['_id_143'] = 161
_id_143 = Bool('_id_143')
FeatureVariable.append(_id_143)
FeatureIndexMap['_id_144'] = 162
_id_144 = Bool('_id_144')
FeatureVariable.append(_id_144)
FeatureIndexMap['buy_paths_288_289'] = 163
buy_paths_288_289 = Bool('buy_paths_288_289')
FeatureVariable.append(buy_paths_288_289)
FeatureIndexMap['buy_paths_288_289_290'] = 164
buy_paths_288_289_290 = Bool('buy_paths_288_289_290')
FeatureVariable.append(buy_paths_288_289_290)
FeatureIndexMap['buy_paths_288_289_291'] = 165
buy_paths_288_289_291 = Bool('buy_paths_288_289_291')
FeatureVariable.append(buy_paths_288_289_291)
FeatureIndexMap['customer_service'] = 166
customer_service = Bool('customer_service')
FeatureVariable.append(customer_service)
FeatureIndexMap['_id_146'] = 167
_id_146 = Bool('_id_146')
FeatureVariable.append(_id_146)
FeatureIndexMap['_id_147'] = 168
_id_147 = Bool('_id_147')
FeatureVariable.append(_id_147)
FeatureIndexMap['_id_148'] = 169
_id_148 = Bool('_id_148')
FeatureVariable.append(_id_148)
FeatureIndexMap['_id_149'] = 170
_id_149 = Bool('_id_149')
FeatureVariable.append(_id_149)
FeatureIndexMap['_id_150'] = 171
_id_150 = Bool('_id_150')
FeatureVariable.append(_id_150)
FeatureIndexMap['_id_152'] = 172
_id_152 = Bool('_id_152')
FeatureVariable.append(_id_152)
FeatureIndexMap['_id_153'] = 173
_id_153 = Bool('_id_153')
FeatureVariable.append(_id_153)
FeatureIndexMap['_id_154'] = 174
_id_154 = Bool('_id_154')
FeatureVariable.append(_id_154)
FeatureIndexMap['_id_155'] = 175
_id_155 = Bool('_id_155')
FeatureVariable.append(_id_155)
FeatureIndexMap['_id_156'] = 176
_id_156 = Bool('_id_156')
FeatureVariable.append(_id_156)
FeatureIndexMap['_id_158'] = 177
_id_158 = Bool('_id_158')
FeatureVariable.append(_id_158)
FeatureIndexMap['_id_159'] = 178
_id_159 = Bool('_id_159')
FeatureVariable.append(_id_159)
FeatureIndexMap['user_behaviour_tracking'] = 179
user_behaviour_tracking = Bool('user_behaviour_tracking')
FeatureVariable.append(user_behaviour_tracking)
FeatureIndexMap['_id_160'] = 180
_id_160 = Bool('_id_160')
FeatureVariable.append(_id_160)
FeatureIndexMap['locally_visited_pages'] = 181
locally_visited_pages = Bool('locally_visited_pages')
FeatureVariable.append(locally_visited_pages)
FeatureIndexMap['external_referring_pages'] = 182
external_referring_pages = Bool('external_referring_pages')
FeatureVariable.append(external_referring_pages)
FeatureIndexMap['behaviour_tracked_previous_purchases'] = 183
behaviour_tracked_previous_purchases = Bool('behaviour_tracked_previous_purchases')
FeatureVariable.append(behaviour_tracked_previous_purchases)
FeatureIndexMap['business_management'] = 184
business_management = Bool('business_management')
FeatureVariable.append(business_management)
FeatureIndexMap['_id_162'] = 185
_id_162 = Bool('_id_162')
FeatureVariable.append(_id_162)
FeatureIndexMap['_id_163'] = 186
_id_163 = Bool('_id_163')
FeatureVariable.append(_id_163)
FeatureIndexMap['physical_goods_fulfillment'] = 187
physical_goods_fulfillment = Bool('physical_goods_fulfillment')
FeatureVariable.append(physical_goods_fulfillment)
FeatureIndexMap['warehouse_management'] = 188
warehouse_management = Bool('warehouse_management')
FeatureVariable.append(warehouse_management)
FeatureIndexMap['shipping'] = 189
shipping = Bool('shipping')
FeatureVariable.append(shipping)
FeatureIndexMap['_id_166'] = 190
_id_166 = Bool('_id_166')
FeatureVariable.append(_id_166)
FeatureIndexMap['_id_167'] = 191
_id_167 = Bool('_id_167')
FeatureVariable.append(_id_167)
FeatureIndexMap['_id_168'] = 192
_id_168 = Bool('_id_168')
FeatureVariable.append(_id_168)
FeatureIndexMap['_id_169'] = 193
_id_169 = Bool('_id_169')
FeatureVariable.append(_id_169)
FeatureIndexMap['_id_171'] = 194
_id_171 = Bool('_id_171')
FeatureVariable.append(_id_171)
FeatureIndexMap['_id_172'] = 195
_id_172 = Bool('_id_172')
FeatureVariable.append(_id_172)
FeatureIndexMap['_id_173'] = 196
_id_173 = Bool('_id_173')
FeatureVariable.append(_id_173)
FeatureIndexMap['_id_174'] = 197
_id_174 = Bool('_id_174')
FeatureVariable.append(_id_174)
FeatureIndexMap['_id_175'] = 198
_id_175 = Bool('_id_175')
FeatureVariable.append(_id_175)
FeatureIndexMap['_id_177'] = 199
_id_177 = Bool('_id_177')
FeatureVariable.append(_id_177)
FeatureIndexMap['_id_178'] = 200
_id_178 = Bool('_id_178')
FeatureVariable.append(_id_178)
FeatureIndexMap['_id_179'] = 201
_id_179 = Bool('_id_179')
FeatureVariable.append(_id_179)
FeatureIndexMap['_id_180'] = 202
_id_180 = Bool('_id_180')
FeatureVariable.append(_id_180)
FeatureIndexMap['_id_181'] = 203
_id_181 = Bool('_id_181')
FeatureVariable.append(_id_181)
FeatureIndexMap['eletronic_goods_fulfillment'] = 204
eletronic_goods_fulfillment = Bool('eletronic_goods_fulfillment')
FeatureVariable.append(eletronic_goods_fulfillment)
FeatureIndexMap['_id_182'] = 205
_id_182 = Bool('_id_182')
FeatureVariable.append(_id_182)
FeatureIndexMap['_id_183'] = 206
_id_183 = Bool('_id_183')
FeatureVariable.append(_id_183)
FeatureIndexMap['services_fulfillment'] = 207
services_fulfillment = Bool('services_fulfillment')
FeatureVariable.append(services_fulfillment)
FeatureIndexMap['_id_184'] = 208
_id_184 = Bool('_id_184')
FeatureVariable.append(_id_184)
FeatureIndexMap['_id_185'] = 209
_id_185 = Bool('_id_185')
FeatureVariable.append(_id_185)
FeatureIndexMap['_id_186'] = 210
_id_186 = Bool('_id_186')
FeatureVariable.append(_id_186)
FeatureIndexMap['_id_187'] = 211
_id_187 = Bool('_id_187')
FeatureVariable.append(_id_187)
FeatureIndexMap['customer_preferences'] = 212
customer_preferences = Bool('customer_preferences')
FeatureVariable.append(customer_preferences)
FeatureIndexMap['_id_189'] = 213
_id_189 = Bool('_id_189')
FeatureVariable.append(_id_189)
FeatureIndexMap['_id_190'] = 214
_id_190 = Bool('_id_190')
FeatureVariable.append(_id_190)
FeatureIndexMap['targeting_criteria_previous_purchases'] = 215
targeting_criteria_previous_purchases = Bool('targeting_criteria_previous_purchases')
FeatureVariable.append(targeting_criteria_previous_purchases)
FeatureIndexMap['_id_191'] = 216
_id_191 = Bool('_id_191')
FeatureVariable.append(_id_191)
FeatureIndexMap['wish_list_content'] = 217
wish_list_content = Bool('wish_list_content')
FeatureVariable.append(wish_list_content)
FeatureIndexMap['previously_visited_pages'] = 218
previously_visited_pages = Bool('previously_visited_pages')
FeatureVariable.append(previously_visited_pages)
FeatureIndexMap['_id_192'] = 219
_id_192 = Bool('_id_192')
FeatureVariable.append(_id_192)
FeatureIndexMap['_id_193'] = 220
_id_193 = Bool('_id_193')
FeatureVariable.append(_id_193)
FeatureIndexMap['_id_194'] = 221
_id_194 = Bool('_id_194')
FeatureVariable.append(_id_194)
FeatureIndexMap['_id_196'] = 222
_id_196 = Bool('_id_196')
FeatureVariable.append(_id_196)
FeatureIndexMap['_id_197'] = 223
_id_197 = Bool('_id_197')
FeatureVariable.append(_id_197)
FeatureIndexMap['_id_199'] = 224
_id_199 = Bool('_id_199')
FeatureVariable.append(_id_199)
FeatureIndexMap['_id_200'] = 225
_id_200 = Bool('_id_200')
FeatureVariable.append(_id_200)
FeatureIndexMap['_id_201'] = 226
_id_201 = Bool('_id_201')
FeatureVariable.append(_id_201)
FeatureIndexMap['_id_203'] = 227
_id_203 = Bool('_id_203')
FeatureVariable.append(_id_203)
FeatureIndexMap['_id_204'] = 228
_id_204 = Bool('_id_204')
FeatureVariable.append(_id_204)
FeatureIndexMap['_id_205'] = 229
_id_205 = Bool('_id_205')
FeatureVariable.append(_id_205)
FeatureIndexMap['_id_206'] = 230
_id_206 = Bool('_id_206')
FeatureVariable.append(_id_206)
FeatureIndexMap['_id_207'] = 231
_id_207 = Bool('_id_207')
FeatureVariable.append(_id_207)
FeatureIndexMap['discounts'] = 232
discounts = Bool('discounts')
FeatureVariable.append(discounts)
FeatureIndexMap['_id_208'] = 233
_id_208 = Bool('_id_208')
FeatureVariable.append(_id_208)
FeatureIndexMap['_id_209'] = 234
_id_209 = Bool('_id_209')
FeatureVariable.append(_id_209)
FeatureIndexMap['_id_210'] = 235
_id_210 = Bool('_id_210')
FeatureVariable.append(_id_210)
FeatureIndexMap['_id_211'] = 236
_id_211 = Bool('_id_211')
FeatureVariable.append(_id_211)
FeatureIndexMap['_id_212'] = 237
_id_212 = Bool('_id_212')
FeatureVariable.append(_id_212)
FeatureIndexMap['_id_214'] = 238
_id_214 = Bool('_id_214')
FeatureVariable.append(_id_214)
FeatureIndexMap['_id_215'] = 239
_id_215 = Bool('_id_215')
FeatureVariable.append(_id_215)
FeatureIndexMap['_id_216'] = 240
_id_216 = Bool('_id_216')
FeatureVariable.append(_id_216)
FeatureIndexMap['_id_217'] = 241
_id_217 = Bool('_id_217')
FeatureVariable.append(_id_217)
FeatureIndexMap['_id_218'] = 242
_id_218 = Bool('_id_218')
FeatureVariable.append(_id_218)
FeatureIndexMap['_id_219'] = 243
_id_219 = Bool('_id_219')
FeatureVariable.append(_id_219)
FeatureIndexMap['_id_220'] = 244
_id_220 = Bool('_id_220')
FeatureVariable.append(_id_220)
FeatureIndexMap['_id_222'] = 245
_id_222 = Bool('_id_222')
FeatureVariable.append(_id_222)
FeatureIndexMap['_id_223'] = 246
_id_223 = Bool('_id_223')
FeatureVariable.append(_id_223)
FeatureIndexMap['_id_224'] = 247
_id_224 = Bool('_id_224')
FeatureVariable.append(_id_224)
FeatureIndexMap['_id_225'] = 248
_id_225 = Bool('_id_225')
FeatureVariable.append(_id_225)
FeatureIndexMap['_id_226'] = 249
_id_226 = Bool('_id_226')
FeatureVariable.append(_id_226)
FeatureIndexMap['_id_228'] = 250
_id_228 = Bool('_id_228')
FeatureVariable.append(_id_228)
FeatureIndexMap['_id_229'] = 251
_id_229 = Bool('_id_229')
FeatureVariable.append(_id_229)
FeatureIndexMap['_id_230'] = 252
_id_230 = Bool('_id_230')
FeatureVariable.append(_id_230)
FeatureIndexMap['_id_231'] = 253
_id_231 = Bool('_id_231')
FeatureVariable.append(_id_231)
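# All of the declarations above follow one mechanical pattern: register the
# feature's index, create a Z3 Bool of the same name, and append it to
# FeatureVariable. A minimal sketch of how such variables are typically
# consumed -- feeding them to a z3.Solver with feature-model constraints. The
# Implies relationship below is hypothetical; the real cross-tree constraints
# are defined elsewhere in this module.
from z3 import Solver, Implies, sat
_sketch = Solver()
_sketch.add(Implies(product_variants, product_information))  # child implies parent (assumed)
_sketch.add(product_variants)  # try enabling the child feature
assert _sketch.check() == sat  # the partial configuration is satisfiable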
'''
Takes some code from twoobject_to_oneobject.py, which was getting too long.
Jerk prediction models.
'''
import explicit_future_prediction_base as fp_base
import tensorflow as tf
from curiosity.models.model_building_blocks import ConvNetwithBypasses
import numpy as np
import cPickle
def deconv_loop(input_node, m, cfg, desc = 'deconv', bypass_nodes = None,
reuse_weights = False, batch_normalize = False, no_nonlinearity_end = False, do_print = True, return_bypass=False, sub_bypass = None, use_3d = False):
m.output = input_node
deconv_nodes = [input_node]
# deconvolving
deconv_depth = cfg[desc + '_depth']
print('deconv depth: %d' % deconv_depth)
cfs0 = None
if bypass_nodes is None:
bypass_nodes = [m.output]
for i in range(1, deconv_depth + 1):
with tf.variable_scope(desc + str(i)) as scope:
if reuse_weights:
scope.reuse_variables()
bypass = cfg[desc][i].get('bypass')
if bypass:
if type(bypass) == list:
bypass_node = [bypass_nodes[bp] for bp in bypass]
elif type(bypass) == dict:
if sub_bypass is None:
raise ValueError('Bypass is dict but no sub_bypass specified')
for k in bypass:
if int(k) == sub_bypass:
if type(bypass[k]) == list:
bypass_node = [bypass_nodes[bp] \
for bp in bypass[k]]
else:
bypass_node = bypass_nodes[bypass[k]]
else:
bypass_node = bypass_nodes[bypass]
m.add_bypass(bypass_node)
bn = cfg[desc][i]['deconv'].get('batch_normalize')
if bn:
norm_it = bn
else:
norm_it = batch_normalize
with tf.contrib.framework.arg_scope([m.deconv, m.deconv3d],
init='xavier', stddev=.01, bias=0, batch_normalize = norm_it):
cfs = cfg[desc][i]['deconv']['filter_size']
cfs0 = cfs
nf = cfg[desc][i]['deconv']['num_filters']
cs = cfg[desc][i]['deconv']['stride']
if 'output_shape' in cfg[desc][i]['deconv']:
out_shape = cfg[desc][i]['deconv']['output_shape']
else:
out_shape = None
if do_print:
print('deconv in: ', m.output)
if no_nonlinearity_end and i == deconv_depth:
if use_3d:
m.deconv3d(nf, cfs, cs, activation = None,
fixed_output_shape=out_shape)
else:
m.deconv(nf, cfs, cs, activation = None,
fixed_output_shape=out_shape)
else:
my_activation = cfg[desc][i].get('nonlinearity')
if my_activation is None:
my_activation = 'relu'
if use_3d:
m.deconv3d(nf, cfs, cs, activation = my_activation,
fixed_output_shape=out_shape)
else:
m.deconv(nf, cfs, cs, activation = my_activation,
fixed_output_shape=out_shape)
if do_print:
print('deconv out:', m.output)
#TODO add print function
pool = cfg[desc][i].get('pool')
if pool:
pfs = pool['size']
ps = pool['stride']
if use_3d:
m.pool3d(pfs, ps)
else:
m.pool(pfs, ps)
deconv_nodes.append(m.output)
bypass_nodes.append(m.output)
if return_bypass:
return [deconv_nodes, bypass_nodes]
return deconv_nodes
def feedforward_conv_loop(input_node, m, cfg, desc = 'encode', bypass_nodes = None, reuse_weights = False, batch_normalize = False, no_nonlinearity_end = False, do_print=True, return_bypass=False, sub_bypass = None, use_3d=False):
m.output = input_node
encode_nodes = [input_node]
#encoding
encode_depth = cfg[desc + '_depth']
print('conv depth: %d' % encode_depth)
cfs0 = None
if bypass_nodes is None:
bypass_nodes = [m.output]
for i in range(1, encode_depth + 1):
# not sure this usage of the ConvNet class creates exactly the params that we want to have, specifically in the 'input' field, but it should give us an accurate record of this network's configuration
with tf.variable_scope(desc + str(i)) as scope:
if reuse_weights:
scope.reuse_variables()
bypass = cfg[desc][i].get('bypass')
if bypass:
if type(bypass) == list:
bypass_node = [bypass_nodes[bp] for bp in bypass]
elif type(bypass) == dict:
if sub_bypass is None:
raise ValueError('Bypass is dict but no sub_bypass specified')
for k in bypass:
if int(k) == sub_bypass:
if type(bypass[k]) == list:
bypass_node = [bypass_nodes[bp] \
for bp in bypass[k]]
else:
bypass_node = bypass_nodes[bypass[k]]
else:
bypass_node = bypass_nodes[bypass]
m.add_bypass(bypass_node)
bn = cfg[desc][i]['conv'].get('batch_normalize')
if bn:
norm_it = bn
else:
norm_it = batch_normalize
with tf.contrib.framework.arg_scope([m.conv, m.conv3d], init='xavier', stddev=.01, bias=0, batch_normalize = norm_it):
cfs = cfg[desc][i]['conv']['filter_size']
cfs0 = cfs
nf = cfg[desc][i]['conv']['num_filters']
cs = cfg[desc][i]['conv']['stride']
if do_print:
print('conv in', m.output)
if no_nonlinearity_end and i == encode_depth:
if use_3d:
m.conv3d(nf, cfs, cs, activation = None)
else:
m.conv(nf, cfs, cs, activation = None)
else:
my_activation = cfg[desc][i].get('nonlinearity')
if my_activation is None:
my_activation = 'relu'
else:
print('NONLIN: ' + my_activation)
if use_3d:
m.conv3d(nf, cfs, cs, activation = my_activation)
else:
m.conv(nf, cfs, cs, activation = my_activation)
if do_print:
print('conv out', m.output)
#TODO add print function
pool = cfg[desc][i].get('pool')
if pool:
pfs = pool['size']
ps = pool['stride']
if use_3d:
m.pool3d(pfs, ps)
else:
m.pool(pfs, ps)
encode_nodes.append(m.output)
bypass_nodes.append(m.output)
if return_bypass:
return [encode_nodes, bypass_nodes]
return encode_nodes
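# A minimal sketch of the cfg dict both loops above expect. Key names are
# inferred from the lookups in the code; the depths, filter sizes and strides
# below are illustrative only. deconv_loop reads the same structure under
# 'deconv_depth' / cfg['deconv'][i]['deconv'] (plus an optional 'output_shape').
example_encode_cfg = {
    'encode_depth': 2,
    'encode': {
        1: {'conv': {'filter_size': 7, 'num_filters': 32, 'stride': 2}},
        2: {'conv': {'filter_size': 3, 'num_filters': 64, 'stride': 1},
            'bypass': [0],  # concatenate bypass_nodes[0] back into the input
            'pool': {'size': 2, 'stride': 2}},
    },
}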
def hidden_loop_with_bypasses(input_node, m, cfg, nodes_for_bypass = [], stddev = .01, reuse_weights = False, activation = 'relu', train = False):
assert len(input_node.get_shape().as_list()) == 2, len(input_node.get_shape().as_list())
hidden_depth = cfg['hidden_depth']
m.output = input_node
print('in hidden loop')
print(m.output)
for i in range(1, hidden_depth + 1):
with tf.variable_scope('hidden' + str(i)) as scope:
if reuse_weights:
scope.reuse_variables()
bypass = cfg['hidden'][i].get('bypass')
if bypass:
bypass_node = nodes_for_bypass[bypass]
m.add_bypass(bypass_node)
nf = cfg['hidden'][i]['num_features']
my_activation = cfg['hidden'][i].get('activation')
if my_activation is None:
my_activation = activation
if train:
my_dropout = cfg['hidden'][i].get('dropout')
else:
my_dropout = None
m.fc(nf, init = 'xavier', activation = my_activation, bias = .01, stddev = stddev, dropout = my_dropout)
nodes_for_bypass.append(m.output)
print(m.output)
return m.output
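# Likewise, a minimal sketch of the cfg consumed by hidden_loop_with_bypasses
# (structure inferred from the lookups above; widths are illustrative only):
example_hidden_cfg = {
    'hidden_depth': 2,
    'hidden': {
        1: {'num_features': 256, 'dropout': 0.75},  # dropout only applies when train=True
        2: {'num_features': 60},
    },
}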
def just_actions_bench(inputs, cfg = None, num_classes = None, time_seen = None, normalization_method = None, stats_file = None, add_gaussians = True, image_height = None, image_width = None, **kwargs):
base_net = fp_base.ShortLongFuturePredictionBase(inputs, store_jerk = True, normalization_method = normalization_method,
time_seen = time_seen, stats_file = stats_file, add_gaussians = add_gaussians, img_height = image_height,
img_width = image_width)
m = ConvNetwithBypasses(**kwargs)
act_node = base_net.inputs['actions_no_pos']
act_shape = act_node.get_shape().as_list()
batch_size = act_shape[0]
m.output = act_node
act_node = m.reshape([np.prod(act_shape[1:])])
pred = hidden_loop_with_bypasses(m.output, m, cfg, reuse_weights = False, train = kwargs['train'])
pred_shape = pred.get_shape().as_list()
if num_classes is not None:
pred_shape.append(int(num_classes))
pred_shape[1] = int(pred_shape[1] / num_classes)
pred = tf.reshape(pred, pred_shape)
retval = {'pred' : pred}
retval.update(base_net.inputs)
return retval, m.params
def basic_jerk_bench(inputs, cfg = None, num_classes = None, time_seen = None, normalization_method = None, stats_file = None, add_gaussians = True, image_height = None, image_width = None, **kwargs):
base_net = fp_base.ShortLongFuturePredictionBase(inputs, store_jerk = True, normalization_method = normalization_method,
time_seen = time_seen, stats_file = stats_file, add_gaussians = add_gaussians, img_height = image_height,
img_width = image_width)
m = ConvNetwithBypasses(**kwargs)
in_node = base_net.inputs['object_data_seen_1d']
in_shape = in_node.get_shape().as_list()
m.output = in_node
in_node = m.reshape([np.prod(in_shape[1:])])
act_node = base_net.inputs['actions_no_pos']
act_shape = act_node.get_shape().as_list()
batch_size = act_shape[0]
m.output = act_node
act_node = m.reshape([np.prod(act_shape[1:])])
depth_node = tf.reshape(base_net.inputs['depth_seen'], [batch_size, -1])
m.output = tf.concat([in_node, act_node, depth_node], axis = 1)
pred = hidden_loop_with_bypasses(m.output, m, cfg, reuse_weights = False, train = kwargs['train'])
pred_shape = pred.get_shape().as_list()
if num_classes is not None:
pred_shape.append(int(num_classes))
pred_shape[1] = int(pred_shape[1] / num_classes)
pred = tf.reshape(pred, pred_shape)
retval = {'pred' : pred}
retval.update(base_net.inputs)
return retval, m.params
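# Both benchmark builders above finish with the same reshape trick: the final
# fully connected layer emits a flat [batch, n_targets * num_classes] tensor,
# which is reshaped to [batch, n_targets, num_classes] so that a per-target
# softmax/cross-entropy can be applied. For example, with num_classes = 3 a
# [8, 9] prediction becomes [8, 3, 3].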
def mom_model_step2(inputs, cfg = None, time_seen = None, normalization_method = None,
stats_file = None, obj_pic_dims = None, scale_down_height = None,
scale_down_width = None, add_depth_gaussian = False, add_gaussians = False,
use_segmentation = False, use_vel = False, include_pose = False,
use_only_t1 = True, do_reconstruction = False,
num_classes = None, keep_prob = None, gpu_id = 0, **kwargs):
print('------NETWORK START-----')
with tf.device('/gpu:%d' % gpu_id):
# rescale inputs to be divisible by 8
rinputs = {}
for k in inputs:
if k in ['depths', 'objects', 'vels', 'accs', 'jerks',
'vels_curr', 'accs_curr', 'actions_map', 'segmentation_map']:
rinputs[k] = tf.pad(inputs[k],
[[0,0], [0,0], [0,0], [3,3], [0,0]], "CONSTANT")
# RESIZING IMAGES
rinputs[k] = tf.unstack(rinputs[k], axis=1)
for i, _ in enumerate(rinputs[k]):
rinputs[k][i] = tf.image.resize_images(rinputs[k][i], [64, 88])
rinputs[k] = tf.stack(rinputs[k], axis=1)
else:
rinputs[k] = inputs[k]
# preprocess input data
batch_size, time_seen = rinputs['depths'].get_shape().as_list()[:2]
time_seen -= 1
long_len = rinputs['object_data'].get_shape().as_list()[1]
base_net = fp_base.ShortLongFuturePredictionBase(
rinputs, store_jerk = True,
normalization_method = normalization_method,
time_seen = time_seen, stats_file = stats_file,
scale_down_height = scale_down_height,
scale_down_width = scale_down_width,
add_depth_gaussian = add_depth_gaussian,
add_gaussians = add_gaussians,
get_hacky_segmentation_map = True,
get_actions_map = True)
inputs = base_net.inputs
# init network
m = ConvNetwithBypasses(**kwargs)
# encode per time step
main_attributes = ['depths']
if use_vel:
print('Using current velocities as input')
main_attributes.append('vels_curr_normed')
if use_segmentation:
print('Using segmentations as input')
main_attributes.append('segmentation_map')
main_input_per_time = [tf.concat([tf.cast(inputs[nm][:, t], tf.float32) \
for nm in main_attributes], axis = 3) for t in range(time_seen)]
if do_reconstruction:
print('Doing reconstruction only!')
main_input_per_time = []
for t in range(time_seen):
inp_t = tf.concat([inputs['depths'][:,t], inputs['vels_normed'][:,t+1]],
axis = 3)
main_input_per_time.append(inp_t)
# initial bypass
bypass_nodes = [inputs['depths'][:, 1], inputs['vels_curr_normed'][:, 1]]
if do_reconstruction:
bypass_nodes = [inputs['depths'][:, 1], inputs['vels_normed'][:, 2]]
# conditioning
if 'use_cond' in cfg:
use_cond = cfg['use_cond']
else:
use_cond = False
if use_cond:
print('Using ACTION CONDITIONING')
cond_attributes = ['actions_map']
inputs['actions_map'] = tf.reduce_sum(inputs['actions_map'], axis=-1)
if 'cond_scale_factor' in cfg:
scale_factor = cfg['cond_scale_factor']
else:
scale_factor = 1
for att in cond_attributes:
shape = inputs[att].get_shape().as_list()
inputs[att] = tf.unstack(inputs[att], axis=1)
for t, _ in enumerate(inputs[att]):
inputs[att][t] = tf.image.resize_images(inputs[att][t],
[shape[2]/scale_factor, shape[3]/scale_factor],
method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)
inputs[att] = tf.stack(inputs[att], axis=1)
cond_input_per_time = [tf.concat([inputs[nm][:, t] \
for nm in cond_attributes], axis = 3) for t in range(time_seen)]
encoded_input_cond = []
reuse_weights = False
for t in range(time_seen):
if use_only_t1 and t != 1:
continue
enc, bypass_nodes = feedforward_conv_loop(
cond_input_per_time[t], m,
token = cwt.encode(
{
"iss": "coaps://as.example",
"sub": "dajiaji",
"cti": "123",
"cnf": {
"jwk": {
"kty": "OKP",
"use": "sig",
"crv": "Ed25519",
"kid": "01",
"x": "<KEY>",
"alg": "EdDSA",
},
},
},
private_key,
)
# presenter:
msg = b"could-you-sign-this-message?" # Provided by recipient.
pop_key_private = COSEKey.from_jwk(
{
"kty": "OKP",
"d": "<KEY>",
"use": "sig",
"crv": "Ed25519",
"kid": "01",
"x": "<KEY>",
"alg": "EdDSA",
}
)
sig = pop_key_private.sign(msg)
# recipient:
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 1 in decoded[8] and isinstance(decoded[8][1], dict)
c = Claims.new(decoded)
extracted = COSEKey.new(c.cnf)
try:
extracted.verify(msg, sig)
except Exception:
pytest.fail("verify should not fail.")
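# The numeric keys asserted above come from RFC 8747 (proof-of-possession key
# semantics for CWTs): claim key 8 is "cnf", and inside it 1 = COSE_Key,
# 2 = Encrypted_COSE_Key, 3 = kid. The tests below exercise each of the three
# confirmation methods in turn.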
def test_sample_readme_cwt_with_pop_encrypted_cose_key_readable(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
enc_key = COSEKey.from_symmetric_key(
"a-client-secret-of-cwt-recipient", # Just 32 bytes!
alg="ChaCha20/Poly1305",
kid="presenter-01",
)
pop_key = COSEKey.from_symmetric_key(
"a-client-secret-of-cwt-presenter",
alg="HMAC 256/256",
)
token = cwt.encode(
{
"iss": "coaps://as.example",
"sub": "dajiaji",
"cti": "123",
"cnf": {
# 'eck' (Encrypted COSE Key) is a keyword defined by this library.
"eck": EncryptedCOSEKey.from_cose_key(pop_key, enc_key),
},
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 2 in decoded[8] and isinstance(decoded[8][2], list)
c = Claims.new(decoded)
extracted = EncryptedCOSEKey.to_cose_key(c.cnf, enc_key)
assert extracted.kty == 4 # Symmetric
assert extracted.alg == 5 # HMAC 256/256
assert extracted.key == b"a-client-secret-of-cwt-presenter"
def test_sample_readme_cwt_with_pop_kid_readable(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
token = cwt.encode(
{
"iss": "coaps://as.example",
"sub": "dajiaji",
"cti": "123",
"cnf": {
"kid": "pop-key-id-of-cwt-presenter",
},
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 3 in decoded[8] and decoded[8][3] == b"pop-key-id-of-cwt-presenter"
c = Claims.new(decoded)
assert c.cnf == "pop-key-id-of-cwt-presenter"
def test_sample_readme_cwt_with_pop_cose_key(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
with open(key_path("public_key_es256.pem")) as key_file:
pop_key = COSEKey.from_pem(key_file.read())
token = cwt.encode(
{
1: "coaps://as.example", # iss
2: "dajiaji", # sub
7: b"123", # cti
8: { # cnf
1: pop_key.to_dict(),
},
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 1 in decoded[8] and isinstance(decoded[8][1], dict)
extracted = COSEKey.new(decoded[8][1])
assert extracted.kty == 2 # EC2
assert extracted.crv == 1 # P-256
def test_sample_readme_cwt_with_pop_encrypted_cose_key(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
enc_key = COSEKey.from_symmetric_key(
"a-client-secret-of-cwt-recipient", # Just 32 bytes!
alg="ChaCha20/Poly1305",
kid="presenter-01",
)
pop_key = COSEKey.from_symmetric_key(
"a-client-secret-of-cwt-presenter",
alg="HMAC 256/256",
)
token = cwt.encode(
{
1: "coaps://as.example", # iss
2: "dajiaji", # sub
7: b"123", # cti
8: { # cnf
2: EncryptedCOSEKey.from_cose_key(pop_key, enc_key),
},
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="issuer-01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 2 in decoded[8] and isinstance(decoded[8][2], list)
extracted = EncryptedCOSEKey.to_cose_key(decoded[8][2], enc_key)
assert extracted.kty == 4 # Symmetric
assert extracted.alg == 5 # HMAC 256/256
assert extracted.key == b"a-client-secret-of-cwt-presenter"
def test_sample_readme_cwt_with_pop_kid(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="01")
token = cwt.encode(
{
1: "coaps://as.example", # iss
2: "dajiaji", # sub
7: b"123", # cti
8: { # cnf
3: b"pop-key-id-of-cwt-presenter",
},
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="01")
decoded = cwt.decode(token, public_key)
assert 8 in decoded and isinstance(decoded[8], dict)
assert 3 in decoded[8] and decoded[8][3] == b"pop-key-id-of-cwt-presenter"
def test_sample_readme_cwt_with_user_defined_claims(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="01")
token = cwt.encode(
{
1: "coaps://as.example", # iss
2: "dajiaji", # sub
7: b"123", # cti
-70001: "foo",
-70002: ["bar"],
-70003: {"baz": "qux"},
-70004: 123,
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="01")
raw = cwt.decode(token, public_key)
assert raw[-70001] == "foo"
assert isinstance(raw[-70002], list)
assert raw[-70002][0] == "bar"
assert isinstance(raw[-70003], dict)
assert raw[-70003]["baz"] == "qux"
assert raw[-70004] == 123
readable = Claims.new(raw)
assert readable.get(-70001) == "foo"
assert readable.get(-70002)[0] == "bar"
assert readable.get(-70003)["baz"] == "qux"
assert readable.get(-70004) == 123
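# The claim keys -70001..-70004 fall in the range RFC 8392 reserves for
# private use (integer values less than -65536), so user-defined claims like
# these can never collide with registered CWT claims.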
def test_sample_readme_cwt_with_user_defined_claims_readable(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="01")
cwt.set_private_claim_names(
{
"ext_1": -70001,
"ext_2": -70002,
"ext_3": -70003,
"ext_4": -70004,
}
)
token = cwt.encode(
{
"iss": "coaps://as.example",
"sub": "dajiaji",
"cti": b"123",
"ext_1": "foo",
"ext_2": ["bar"],
"ext_3": {"baz": "qux"},
"ext_4": 123,
},
private_key,
)
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key = COSEKey.from_pem(key_file.read(), kid="01")
raw = cwt.decode(token, public_key)
readable = Claims.new(
raw,
private_claim_names={
"ext_1": -70001,
"ext_2": -70002,
"ext_3": -70003,
"ext_4": -70004,
},
)
assert readable.get("ext_1") == "foo"
assert readable.get("ext_2")[0] == "bar"
assert readable.get("ext_3")["baz"] == "qux"
assert readable.get("ext_4") == 123
def test_sample_readme_decode_with_multiple_keys(self):
with open(key_path("private_key_ed25519.pem")) as key_file:
private_key = COSEKey.from_pem(key_file.read(), kid="02")
token = cwt.encode(
{
"iss": "coaps://as.example",
"sub": "dajiaji",
"cti": b"123",
},
private_key,
)
with open(key_path("public_key_es256.pem")) as key_file:
public_key_1 = COSEKey.from_pem(key_file.read(), kid="01")
with open(key_path("public_key_ed25519.pem")) as key_file:
public_key_2 = COSEKey.from_pem(key_file.read(), kid="02")
decoded = cwt.decode(token, [public_key_1, public_key_2])
assert 1 in decoded and decoded[1] == "coaps://as.example"
def test_sample_rfc8392_a3(self):
key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
encoded = bytes.fromhex(SAMPLE_CWT_RFC8392_A3)
decoded = cwt.decode(encoded, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
assert 2 in decoded and decoded[2] == "erikw"
assert 3 in decoded and decoded[3] == "coap://light.example.com"
assert 4 in decoded and decoded[4] == 1444064944
assert 5 in decoded and decoded[5] == 1443944944
assert 6 in decoded and decoded[6] == 1443944944
assert 7 in decoded and decoded[7] == bytes.fromhex("0b71")
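# Claim keys in the RFC 8392 samples: 1 = iss, 2 = sub, 3 = aud, 4 = exp,
# 5 = nbf, 6 = iat, 7 = cti; the expected values above come from the signed
# CWT example in Appendix A.3 of the RFC.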
def test_sample_rfc8392_a3_with_encoding_old(self):
key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
encoded = cwt.encode_and_sign(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=key,
)
decoded = cwt.decode(encoded, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a3_with_encoding(self):
key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
token = cwt.encode(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key,
)
decoded = cwt.decode(token, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a4_old(self):
key = COSEKey.new(
{
-1: bytes.fromhex(
"403697de87af64611c1d32a05dab0fe1fcb715a86ab435f1ec99192d79569388"
),
1: 4, # Symmetric
2: bytes.fromhex("53796d6d6574726963323536"),
3: 4, # HMAC256/64
}
)
encoded = cwt.encode_and_mac(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=key,
tagged=True,
)
assert encoded == bytes.fromhex(SAMPLE_CWT_RFC8392_A4)
decoded = cwt.decode(encoded, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a4(self):
key = COSEKey.new(
{
-1: bytes.fromhex(
"403697de87af64611c1d32a05dab0fe1fcb715a86ab435f1ec99192d79569388"
),
1: 4, # Symmetric
2: bytes.fromhex("53796d6d6574726963323536"),
3: 4, # HMAC256/64
}
)
token = cwt.encode(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key,
tagged=True,
)
assert token == bytes.fromhex(SAMPLE_CWT_RFC8392_A4)
decoded = cwt.decode(token, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a5_old(self):
key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_1))
nonce = bytes.fromhex("99a0d7846e762c49ffe8a63e0b")
encoded = cwt.encode_and_encrypt(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=key,
nonce=nonce,
)
assert encoded == bytes.fromhex(SAMPLE_CWT_RFC8392_A5)
decoded = cwt.decode(encoded, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a5(self):
key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_1))
nonce = bytes.fromhex("99a0d7846e762c49ffe8a63e0b")
token = cwt.encode(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=key,
nonce=nonce,
)
assert token == bytes.fromhex(SAMPLE_CWT_RFC8392_A5)
decoded = cwt.decode(token, keys=key, no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a6(self):
sig_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
enc_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_1))
encrypted = bytes.fromhex(SAMPLE_CWT_RFC8392_A6)
decoded = cwt.decode(encrypted, keys=[enc_key, sig_key], no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a6_with_encoding_old(self):
sig_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
signed = cwt.encode_and_sign(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=sig_key,
)
enc_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_1))
nonce = bytes.fromhex("4a0694c0e69ee6b5956655c7b2")
encrypted = cwt.encode_and_encrypt(signed, key=enc_key, nonce=nonce)
decoded = cwt.decode(encrypted, keys=[enc_key, sig_key], no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_rfc8392_a6_with_encoding(self):
sig_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_3))
signed = cwt.encode(
{
1: "coap://as.example.com",
2: "erikw",
3: "coap://light.example.com",
4: 1444064944,
5: 1443944944,
6: 1443944944,
7: bytes.fromhex("0b71"),
},
key=sig_key,
)
enc_key = COSEKey.from_bytes(bytes.fromhex(SAMPLE_COSE_KEY_RFC8392_A2_1))
nonce = bytes.fromhex("4a0694c0e69ee6b5956655c7b2")
encrypted = cwt.encode(signed, key=enc_key, nonce=nonce)
decoded = cwt.decode(encrypted, keys=[enc_key, sig_key], no_verify=True)
assert 1 in decoded and decoded[1] == "coap://as.example.com"
def test_sample_hcert_testdata_AT_2DCode_raw_1(self):
# A DSC (Document Signing Certificate) issued by a CSCA (Country Signing Certificate Authority).
dsc = "-----BEGIN CERTIFICATE-----\nMIIBvTCCAWOgAwIBAgIKAXk8i88OleLsuTAKBggqhkjOPQQDAjA2MRYwFAYDVQQDDA1BVCBER0MgQ1NDQSAxMQswCQYDVQQGEwJBVDEPMA0GA1UECgwGQk1TR1BLMB4XDTIxMDUwNTEyNDEwNloXDTIzMDUwNTEyNDEwNlowPTERMA8GA1UEAwwIQVQgRFNDIDExCzAJBgNVBAYTAkFUMQ8wDQYDVQQKDAZCTVNHUEsxCjAIBgNVBAUTATEwWTATBgcqhkjOPQIBBggqhkjOPQMBBwNCAASt1Vz1rRuW1HqObUE9MDe7RzIk1gq4XW5GTyHuHTj5cFEn2Rge37+hINfCZZcozpwQKdyaporPUP1TE7UWl0F3o1IwUDAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFO49y1ISb6cvXshLcp8UUp9VoGLQMB8GA1UdIwQYMBaAFP7JKEOflGEvef2iMdtopsetwGGeMAoGCCqGSM49BAMCA0gAMEUCIQDG2opotWG8tJXN84ZZqT6wUBz9KF8D+z9NukYvnUEQ3QIgdBLFSTSiDt0UJaDF6St2bkUQuVHW6fQbONd731/M4nc=\n-----END CERTIFICATE-----"
# An EUDCC (EU Digital COVID Certificate)
eudcc = bytes.fromhex(
"d2844da20448d919375fc1e7b6b20126a0590133a4041a61817ca0061a60942ea001624154390103a101a4617681aa62646e01626d616d4f52472d3130303033303231356276706a313131393334393030376264746a323032312d30322d313862636f624154626369783155524e3a555643493a30313a41543a31303830373834334639344145453045453530393346424332353442443831332342626d706c45552f312f32302f31353238626973781b4d696e6973747279206f66204865616c74682c20417573747269616273640262746769383430353339303036636e616da463666e74754d5553544552465241553c474f455353494e47455262666e754d7573746572667261752d47c3b6c39f696e67657263676e74684741425249454c4562676e684761627269656c656376657265312e302e3063646f626a313939382d30322d323658405812fce67cb84c3911d78e3f61f890d0c80eb9675806aebed66aa2d0d0c91d1fc98d7bcb80bf00e181806a9502e11b071325901bd0d2c1b6438747b8cc50f521"
)
public_key = load_pem_hcert_dsc(dsc)
decoded = cwt.decode(eudcc, public_key)
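# (hcert payloads travel under CWT claim key -260, with map key 1 holding the
# eu_dgc_v1 document, so assertions here would typically inspect
# decoded[-260][1] for the certificate entries encoded in the hex above.)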
This is usually retrieved from a previous list call.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.JobCollection`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/catalogs/{catalogId}/jobs"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"display_name",
"lifecycle_state",
"time_created",
"time_updated",
"created_by_id",
"updated_by_id",
"job_type",
"job_definition_key",
"schedule_cron_expression",
"time_schedule_begin",
"time_schedule_end",
"schedule_type",
"connection_key",
"fields",
"execution_count",
"time_of_latest_execution",
"sort_by",
"sort_order",
"limit",
"page",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_jobs got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"catalogId": catalog_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'fields' in kwargs:
fields_allowed_values = ["key", "displayName", "description", "catalogId", "jobDefinitionKey", "lifecycleState", "timeCreated", "timeUpdated", "createdById", "updatedById", "jobType", "scheduleCronExpression", "timeScheduleBegin", "scheduleType", "executionCount", "timeOfLatestExecution", "executions", "uri"]
for fields_item in kwargs['fields']:
if fields_item not in fields_allowed_values:
raise ValueError(
"Invalid value for `fields`, must be one of {0}".format(fields_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["TIMECREATED", "DISPLAYNAME"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
query_params = {
"displayName": kwargs.get("display_name", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"timeCreated": kwargs.get("time_created", missing),
"timeUpdated": kwargs.get("time_updated", missing),
"createdById": kwargs.get("created_by_id", missing),
"updatedById": kwargs.get("updated_by_id", missing),
"jobType": kwargs.get("job_type", missing),
"jobDefinitionKey": kwargs.get("job_definition_key", missing),
"scheduleCronExpression": kwargs.get("schedule_cron_expression", missing),
"timeScheduleBegin": kwargs.get("time_schedule_begin", missing),
"timeScheduleEnd": kwargs.get("time_schedule_end", missing),
"scheduleType": kwargs.get("schedule_type", missing),
"connectionKey": kwargs.get("connection_key", missing),
"fields": self.base_client.generate_collection_format_param(kwargs.get("fields", missing), 'multi'),
"executionCount": kwargs.get("execution_count", missing),
"timeOfLatestExecution": kwargs.get("time_of_latest_execution", missing),
"sortBy": kwargs.get("sort_by", missing),
"sortOrder": kwargs.get("sort_order", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="JobCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="JobCollection")
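# A minimal usage sketch (not generated SDK code; catalog_ocid is a
# placeholder): the limit/page query parameters above follow the standard OCI
# pagination contract, so oci.pagination can exhaust all pages in one call.
#
#     import oci
#     from oci.data_catalog import DataCatalogClient
#     client = DataCatalogClient(oci.config.from_file())
#     all_jobs = oci.pagination.list_call_get_all_results(
#         client.list_jobs, catalog_ocid, lifecycle_state="ACTIVE").data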
def list_tags(self, catalog_id, **kwargs):
"""
Returns a list of all user created tags in the system.
:param str catalog_id: (required)
Unique catalog identifier.
:param str display_name: (optional)
A filter to return only resources that match the entire display name given. The match is not case sensitive.
:param str lifecycle_state: (optional)
A filter to return only resources that match the specified lifecycle state. The value is case insensitive.
:param list[str] fields: (optional)
Specifies the fields to return in a term summary response.
Allowed values are: "key", "displayName", "description", "glossaryKey", "parentTermKey", "isAllowedToHaveChildTerms", "path", "lifecycleState", "timeCreated", "workflowStatus", "associatedObjectCount", "uri"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified, TIMECREATED is the default.
Allowed values are: "TIMECREATED", "DISPLAYNAME"
:param str sort_order: (optional)
The sort order to use, either 'asc' or 'desc'.
Allowed values are: "ASC", "DESC"
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.data_catalog.models.TermCollection`
:rtype: :class:`~oci.response.Response`
"""
resource_path = "/catalogs/{catalogId}/tags"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"display_name",
"lifecycle_state",
"fields",
"sort_by",
"sort_order",
"limit",
"page",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_tags got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"catalogId": catalog_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'fields' in kwargs:
fields_allowed_values = ["key", "displayName", "description", "glossaryKey", "parentTermKey", "isAllowedToHaveChildTerms", "path", "lifecycleState", "timeCreated", "workflowStatus", "associatedObjectCount", "uri"]
for fields_item in kwargs['fields']:
if fields_item not in fields_allowed_values:
raise ValueError(
"Invalid value for `fields`, must be one of {0}".format(fields_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["TIMECREATED", "DISPLAYNAME"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
query_params = {
"displayName": kwargs.get("display_name", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"fields": self.base_client.generate_collection_format_param(kwargs.get("fields", missing), 'multi'),
"sortBy": kwargs.get("sort_by", missing),
"sortOrder": kwargs.get("sort_order", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="TermCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="TermCollection")
def list_term_relationships(self, catalog_id, glossary_key, term_key, **kwargs):
"""
Returns a list of all term relationships within a glossary.
:param str catalog_id: (required)
Unique catalog identifier.
:param str glossary_key: (required)
Unique glossary key.
:param str term_key: (required)
Unique glossary term key.
:param str display_name: (optional)
A filter to return only resources that match the entire display name given. The match is not case sensitive.
:param str lifecycle_state: (optional)
A filter to return only resources that match the specified lifecycle state. The value is case insensitive.
:param list[str] fields: (optional)
Specifies the fields to return in a term relationship summary response.
Allowed values are: "key", "displayName", "description", "relatedTermKey", "relatedTermDisplayName", "parentTermKey", "parentTermDisplayName", "lifecycleState", "timeCreated", "uri"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for TIMECREATED is descending. Default order for DISPLAYNAME is ascending. If no value is specified, TIMECREATED is the default.
Allowed values are: "TIMECREATED", "DISPLAYNAME"
:param str sort_order: (optional)
The sort order to use, either 'asc' or 'desc'.
Allowed values are: "ASC", "DESC"
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str opc_request_id: (optional)
The client request ID for tracing.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://oracle-cloud-infrastructure-python-sdk.readthedocs.io/en/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
# coding: utf-8
import math
import cairo
import numpy
from shapely.geometry import Point, LineString, Polygon, box
from mapython import projection
from mapython import utils
class Map(object):
'''
Creates a drawable map object which can be rendered.
:param fobj: a filename or writable file object
:param bbox: iterable containing the max extents of the map in the
following form: (minlon, minlat, maxlon, maxlat)
:param max_size: max map width/height in pixel/point according to
surface_type
:param proj: projection function for drawing the map,
should return (x, y) in metres. Some functions are predefined in
:mod:`mapython.projection`
:param surface_type: must be one of png, pdf, ps or svg
'''
SURFACE_TYPES = {
# 'png':cairo.ImageSurface, => needs special treatment
'pdf': cairo.PDFSurface,
'ps': cairo.PSSurface,
'svg': cairo.SVGSurface,
}
def __init__(
self,
fobj,
bbox,
max_size=800,
proj=projection.mercator,
surface_type='png',
):
self.fobj = fobj
self.bbox = box(*bbox)
self.max_size = max_size
self.surface_type = surface_type
# projection can't be integrated in matrix because projection is not
# necessarily linear
self.projection = proj
# inits: self.x_diff, self.y_diff, self.x0, self.y0
self._init_coord_system()
# inits: self.width, self.height, self.surface
self._init_surface()
# inits: self.m2unit_matrix, self.unit2m_matrix, self.scale
self._init_transformation()
self.context = cairo.Context(self.surface)
self.map_area = box(0, 0, self.width, self.height)
self.conflict_area = Polygon()
def _init_coord_system(self):
minlon, minlat, maxlon, maxlat = self.bbox.bounds
#: convert to metres
minx, miny = self.projection(minlon, minlat)
maxx, maxy = self.projection(maxlon, maxlat)
#: calculate map size in metres
self.x_diff = abs(maxx - minx)
self.y_diff = abs(maxy - miny)
#: determine coordinate origin x0 and y0
self.x0, self.y0 = minx, miny
#: orientate coord system
if minx > maxx:
self.x0 = maxx
if miny < maxy:
self.y0 = maxy
def _init_surface(self):
#: calculate surface size in unit
if self.x_diff > self.y_diff:
self.width = self.max_size
self.height = int(math.ceil(self.max_size/self.x_diff*self.y_diff))
else:
self.width = int(math.ceil(self.max_size/self.y_diff*self.x_diff))
self.height = self.max_size
#: init surface object according to surface_type
if self.SURFACE_TYPES.get(self.surface_type) is not None:
surface_cls = self.SURFACE_TYPES.get(self.surface_type)
self.surface = surface_cls(self.fobj, self.width, self.height)
#: fall back to png as default type
else:
self.surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, self.width,
self.height)
def _init_transformation(self):
x_scale = self.width / self.x_diff # unit per metre
y_scale = self.height / self.y_diff # unit per metre
#: transformation matrix to convert from metres to unit
self.m2unit_matrix = cairo.Matrix(xx=x_scale, yy=y_scale)
#: transformation matrix to convert from unit to metres
#: NOTE: copy.copy or copy.deepcopy of m2unit_matrix does not work
self.unit2m_matrix = cairo.Matrix(xx=x_scale, yy=y_scale)
self.unit2m_matrix.invert()
#: determine average metres per px => scale
dist = self.unit2m_matrix.transform_distance(math.sqrt(0.5),
math.sqrt(0.5))
self.scale = sum(dist) / 2
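# Note (added): (sqrt(0.5), sqrt(0.5)) is a unit-length diagonal vector in
# device units; transforming it with unit2m_matrix and averaging the
# components gives a rough metres-per-pixel estimate used as the map scale.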
def draw_background(self, color):
'''
Fills the whole map with color.
:param color: ``(r, g, b[, a])``
'''
self.context.rectangle(0, 0, self.width, self.height)
self.context.set_source_rgba(*color)
self.context.fill()
def draw_line(
self,
coords,
color=(0, 0, 0),
width=1,
line_cap=cairo.LINE_CAP_ROUND,
line_join=cairo.LINE_JOIN_ROUND,
line_dash=None
):
'''
Draws a line.
:param coords: iterable containing all coordinates as ``(lon, lat)``
:param color: ``(r, g, b[, a])``
:param width: line-width in unit (pixel/point)
:param line_cap: one of :const:`cairo.LINE_CAP_*`
:param line_join: one of :const:`cairo.LINE_JOIN_*`
:param line_dash: list/tuple used by :meth:`cairo.Context.set_dash`
'''
#: move to first coords
x, y = self.transform_coords(*coords[0])
self.context.move_to(x, y)
#: draw line to rest of coords
for lon, lat in coords[1:]:
x, y = self.transform_coords(lon, lat)
self.context.line_to(x, y)
#: fill line with color
self.context.set_source_rgba(*color)
self.context.set_line_width(width)
self.context.set_line_cap(line_cap)
self.context.set_line_join(line_join)
self.context.set_dash(line_dash or tuple())
self.context.stroke()
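# Sketch (added, hypothetical coordinates): a two-segment blue line, 2 px wide:
# m.draw_line([(11.50, 48.10), (11.55, 48.12), (11.60, 48.11)],
#             color=(0, 0, 1), width=2)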
def draw_polygon(
self,
exterior,
interiors=None,
background_color=(0, 0, 0, 0),
background_image=None,
border_width=0,
border_color=(1, 1, 1, 1),
border_line_cap=cairo.LINE_CAP_ROUND,
border_line_join=cairo.LINE_JOIN_ROUND,
border_line_dash=None
):
'''
Draws a polygon.
:param exterior: iterable containing all exterior coordinates as
``(lon, lat)``
:param interiors: list/tuple with separate iterables containing
coordinates of the holes of the polygon as ``(lon, lat)``
:param background_color: ``(r, g, b[, a])``
:param background_image: file object or path to image file
:param border_width: border-width in unit (pixel/point)
:param border_color: ``(r, g, b[, a])``
:param border_line_cap: one of :const:`cairo.LINE_CAP_*`
:param border_line_join: one of :const:`cairo.LINE_JOIN_*`
:param border_line_dash: list/tuple used by
:meth:`cairo.Context.set_dash`
'''
polygons = (exterior, )
if interiors is not None:
polygons += interiors
for coords in polygons:
#: move to first coords
x, y = self.transform_coords(*coords[0])
self.context.move_to(x, y)
#: draw line to rest of coords
for lon, lat in coords[1:]:
x, y = self.transform_coords(lon, lat)
self.context.line_to(x, y)
#: fill polygon with color [and background]
self.context.set_source_rgba(*background_color)
if background_image is not None:
self.context.fill_preserve()
image = cairo.ImageSurface.create_from_png(background_image)
pattern = cairo.SurfacePattern(image)
pattern.set_extend(cairo.EXTEND_REPEAT)
self.context.set_source(pattern)
self.context.fill_preserve()
#: draw border
self.context.set_source_rgba(*border_color)
self.context.set_line_width(border_width)
self.context.set_line_cap(border_line_cap)
self.context.set_line_join(border_line_join)
self.context.set_dash(border_line_dash or tuple())
self.context.stroke()
def draw_arc(
self,
coord,
radius,
angle1=0,
angle2=2*math.pi,
background_color=(0, 0, 0, 0),
background_image=None,
border_width=0,
border_color=(1, 1, 1, 1),
border_line_cap=cairo.LINE_CAP_ROUND,
border_line_join=cairo.LINE_JOIN_ROUND,
border_line_dash=None
):
'''
Draws an arc. Angles are counted from the positive X axis
to the positive Y axis.
:param coord: ``(lon, lat)``
:param radius: as float in unit (pixel/point)
:param angle1: start angle in radians [0, 2pi]
:param angle2: end angle in radians [0, 2pi]
:param background_color: ``(r, g, b[, a])``
:param background_image: file object or path to image file
:param border_width: border-width in unit (pixel/point)
:param border_color: ``(r, g, b[, a])``
:param border_line_cap: one of :const:`cairo.LINE_CAP_*`
:param border_line_join: one of :const:`cairo.LINE_JOIN_*`
:param border_line_dash: list/tuple used by
:meth:`cairo.Context.set_dash`
'''
x, y = self.transform_coords(*coord)
#: draw circle
circle = (angle1 - angle2) % (2 * math.pi) == 0
if not circle:
self.context.move_to(x, y)
self.context.arc(x, y, radius, angle1, angle2)
if not circle:
self.context.close_path()
#: fill arc with color [and background]
self.context.set_source_rgba(*background_color)
if background_image is not None:
self.context.fill_preserve()
image = cairo.ImageSurface.create_from_png(background_image)
pattern = cairo.SurfacePattern(image)
pattern.set_extend(cairo.EXTEND_REPEAT)
self.context.set_source(pattern)
self.context.fill_preserve()
#: draw border
self.context.set_source_rgba(*border_color)
self.context.set_line_width(border_width)
self.context.set_line_cap(border_line_cap)
self.context.set_line_join(border_line_join)
self.context.set_dash(border_line_dash or tuple())
self.context.stroke()
def draw_text(
self,
coord,
text,
color=(0, 0, 0),
font_size=11,
font_family='Tahoma',
font_style=cairo.FONT_SLANT_NORMAL,
font_weight=cairo.FONT_WEIGHT_NORMAL,
text_halo_width=3,
text_halo_color=(1, 1, 1),
text_halo_line_cap=cairo.LINE_CAP_ROUND,
text_halo_line_join=cairo.LINE_JOIN_ROUND,
text_halo_line_dash=None,
text_transform=None,
image=None,
image_margin=4
):
'''
Draws text either centered on coordinate or the image centered on
coordinate and text on the right of the image.
:param coord: ``(lon, lat)``
:param text: text to be drawn
:param color: ``(r, g, b[, a])``
:param font_size: font-size in unit (pixel/point)
:param font_family: font name
:param font_style: ``cairo.FONT_SLANT_NORMAL``,
``cairo.FONT_SLANT_ITALIC`` or ``cairo.FONT_SLANT_OBLIQUE``
:param font_weight: ``cairo.FONT_WEIGHT_NORMAL`` or
``cairo.FONT_WEIGHT_BOLD``
:param text_halo_width: border-width in unit (pixel/point)
:param text_halo_color: ``(r, g, b[, a])``
:param text_halo_line_cap: one of :const:`cairo.LINE_CAP_*`
:param text_halo_line_join: one of :const:`cairo.LINE_JOIN_*`
:param text_halo_line_dash: list/tuple used by
:meth:`cairo.Context.set_dash`
:param text_transform: one of ``'lowercase'``, ``'uppercase'`` or
``'capitalize'``
:param image: file object or path to image file
:param image_margin: space between text and image in int or float
'''
x, y = self.transform_coords(*coord)
# abort if there are already too many text_paths in this area
if self.conflict_density(x, y) > 0.07:
self.context.new_path()
return
text = utils.text_transform(text, text_transform)
#: draw spot name
self.context.select_font_face(font_family, font_style, font_weight)
self.context.set_font_size(font_size)
width, height = self.context.text_extents(text)[2:4]
if image is not None:
image = cairo.ImageSurface.create_from_png(image)
image_width, image_height = image.get_width(), image.get_height()
text_area = box(
x - image_width / 2.0,
y - max(height, image_height) / 2.0,
x + image_width + width + image_margin,
y + max(height, image_height) / 2.0
)
else:
# place text directly on coord
x -= width / 2.0
text_area = box(x, y - height / 2., x + width, y + height / 2.)
try:
newx, newy = self.find_free_position(text_area)
except TypeError: # no free position found
self.context.new_path()
return
if image is not None:
y = newy + (max(height, image_height) - image_height) / 2.0
self.context.set_source_surface(image, newx, y)
self.context.paint()
image_area = box(newx, y, newx + image_width, y + image_height)
newx += image_width + image_margin
newy += (image_height + height) / 2.0
else:
# find_free_position uses minx and miny as position but
# cairo uses bottom left corner
newy += height
# abort if new position is too far away from original position
if Point(newx, newy).distance(Point(x, y)) > 0.1 * self.max_size:
self.context.new_path()
return
# round positions for clear text rendering
self.context.move_to(int(newx), int(newy))
self.context.text_path(text)
#: draw text halo
self.context.set_source_rgba(*text_halo_color)
self.context.set_line_width(2 * text_halo_width)
self.context.set_line_cap(text_halo_line_cap)
self.context.set_line_join(text_halo_line_join)
self.context.set_dash(text_halo_line_dash or tuple())
self.context.stroke_preserve()
#: determine covered area by text
area = box(*self.context.path_extents())
if image is not None:
area = area.union(image_area)
self.conflict_union(area)
#: fill characters with color
self.context.set_source_rgba(*color)
self.context.fill()
def draw_text_on_line(
self,
coords,
text,
color=(0, 0, 0),
font_size=10,
font_family='Tahoma',
font_style=cairo.FONT_SLANT_NORMAL,
font_weight=cairo.FONT_WEIGHT_NORMAL,
text_halo_width=1,
text_halo_color=(1, 1, 1),
text_halo_line_cap=cairo.LINE_CAP_ROUND,
text_halo_line_join=cairo.LINE_JOIN_ROUND,
text_halo_line_dash=None,
text_transform=None,
):
'''
Draws text on a line. Tries to find a position with
1.1.1.1'
publickeyfile = get_ssh_pub_key()
if publickeyfile is not None:
publickeyfile = open(publickeyfile).read().rstrip()
if not keys:
keys = [publickeyfile]
else:
keys.append(publickeyfile)
keys = '\n'.join(keys)
host_name = "%s.%s" % (name, domain) if domain is not None else name
initialization = types.Initialization(user_name=user_name, root_password=<PASSWORD>,
authorized_ssh_keys=keys, host_name=host_name,
nic_configurations=nic_configurations, dns_servers=dns,
dns_search=domain, custom_script=custom_script)
if start:
vm_service.start(use_cloud_init=cloudinit, use_ignition=ignition,
vm=types.Vm(initialization=initialization, host=vmhost))
if ip is not None:
self.update_metadata(name, 'ip', ip)
return {'result': 'success'}
def start(self, name):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vminfo = vmsearch[0]
if str(vminfo.status) == 'down':
vm = self.vms_service.vm_service(vmsearch[0].id)
vm.start()
return {'result': 'success'}
def stop(self, name, soft=False):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vminfo = vmsearch[0]
if str(vminfo.status) != 'down':
vm = self.vms_service.vm_service(vmsearch[0].id)
vm.stop()
return {'result': 'success'}
def snapshot(self, name, base, revert=False, delete=False, listing=False):
vmsearch = self.vms_service.list(search='name=%s' % base)
if not vmsearch:
error("VM %s not found" % base)
return {'result': 'failure', 'reason': "VM %s not found" % base}
vm = vmsearch[0]
snapshots_service = self.vms_service.vm_service(vm.id).snapshots_service()
snapshots_service.add(types.Snapshot(description=name))
return
def restart(self, name):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vm = vmsearch[0]
status = str(vm.status)
vm = self.vms_service.vm_service(vmsearch[0].id)
if status == 'down':
vm.start()
else:
vm.reboot()
return {'result': 'success'}
def report(self):
api = self.conn.system_service().get()
system_service = self.conn.system_service()
# vmslist = self.vms_service.list()
# print("Vms Running: %s" % len(vmslist))
print("Version: %s" % api.product_info.version.full_version)
if api.summary.vms is not None:
print("Vms Running: %s" % api.summary.vms.total)
if api.summary.hosts is not None:
print("Hosts: %d" % api.summary.hosts.total)
hosts_service = self.conn.system_service().hosts_service()
for host in hosts_service.list():
print("Host: %s" % host.name)
if api.summary.storage_domains is not None:
print("Storage Domains: %d" % api.summary.storage_domains.total)
sds_service = system_service.storage_domains_service()
for sd in sds_service.list():
print("Storage Domain: %s" % sd.name)
def status(self, name):
print("not implemented")
return
def list(self):
vms = []
system_service = self.conn.system_service()
if self.filtertag is not None:
vmslist = self.vms_service.list(search='description=plan*,filter=%s*' % self.filtertag)
elif self.filteruser:
users_service = system_service.users_service()
user_name = '%s-authz' % self.user if '@internal' in self.user else self.user
userid = [u.id for u in users_service.list() if u.user_name == user_name][0]
vmslist = self.vms_service.list(search='created_by_user_id=%s' % userid)
elif self.filtervms:
vmslist = self.vms_service.list(search='description=plan=*,profile=*')
else:
vmslist = self.vms_service.list()
for vm in vmslist:
vms.append(self.info(vm.name, vm=vm))
return sorted(vms, key=lambda x: x['name'])
def console(self, name, tunnel=False, web=False):
connectiondetails = None
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vm = vmsearch[0]
vm_service = self.vms_service.vm_service(vm.id)
consoles_service = vm_service.graphics_consoles_service()
consoles = consoles_service.list(current=True)
for c in consoles:
console_service = consoles_service.console_service(c.id)
ticket = console_service.ticket()
if str(c.protocol) == 'spice':
ocacontent = open(self.ca_file).read().replace('\n', '\\n')
try:
host = self.conn.follow_link(vm.host)
hostname = host.address
except:
hostname = c.address
subject = 'O=%s,CN=%s' % (self.org, hostname)
if tunnel:
localport1 = common.get_free_port()
localport2 = common.get_free_port()
command = "ssh -o LogLevel=QUIET -f -p %s -L %s:%s:%s -L %s:%s:%s %s@%s sleep 5"\
% (self.port, localport1, c.address, c.port, localport2, c.address, c.tls_port, self.ssh_user,
self.host)
os.system(command)
address = '127.0.0.1' if tunnel else c.address
port = localport1 if tunnel else c.port
sport = localport2 if tunnel else c.tls_port
connectiondetails = """[virt-viewer]
type=spice
host={address}
port={port}
password={ticket}
tls-port={sport}
fullscreen=0
title={name}:%d
enable-smartcard=0
enable-usb-autoshare=1
delete-this-file=1
usb-filter=-1,-1,-1,-1,0
tls-ciphers=DEFAULT
host-subject={subject}
ca={ocacontent}
toggle-fullscreen=shift+f11
release-cursor=shift+f12
secure-attention=ctrl+alt+end
secure-channels=main;inputs;cursor;playback;record;display;usbredir;smartcard""".format(subject=subject,
ocacontent=ocacontent,
address=address,
port=port,
sport=sport,
ticket=ticket.value,
name=name)
elif str(c.protocol) == 'vnc':
if tunnel:
localport1 = common.get_free_port()
command = "ssh -o LogLevel=QUIET -f -p %s -L %s:%s:%s %s@%s sleep 5"\
% (self.port, localport1, c.address, c.port, self.ssh_user, self.host)
os.system(command)
address = '127.0.0.1' if tunnel else c.address
port = localport1 if tunnel else c.port
connectiondetails = """[virt-viewer]
type=vnc
host={address}
port={port}
password={ticket}
title={name}:%d
delete-this-file=1
toggle-fullscreen=shift+f11
release-cursor=shift+f12""".format(address=address, port=port, ticket=ticket.value, name=name)
if connectiondetails is None:
error("Couldn't retrieve connection details for %s" % name)
sys.exit(1)
if web:
return "%s://%s:%s+%s" % (c.protocol, address, sport if str(c.protocol) == 'spice' else port, ticket.value)
with open("/tmp/console.vv", "w") as f:
f.write(connectiondetails)
if self.debug or os.path.exists("/i_am_a_container"):
msg = "Use remote-viewer with this:\n%s" % connectiondetails if not self.debug else connectiondetails
pprint(msg)
else:
os.popen("remote-viewer /tmp/console.vv &")
return
def serialconsole(self, name, web=False):
"""
:param name:
:return:
"""
# localport1 = common.get_free_port()
# command = "ssh -o LogLevel=QUIET -f -p %s -L %s:127.0.0.1:2222 ovirt-vmconsole@%s sleep 10"\
# % (self.port, localport, self.host)
# os.popen(command)
system_service = self.conn.system_service()
users_service = system_service.users_service()
user = users_service.list(search='usrname=%s-authz' % self.user)[0]
user_service = users_service.user_service(user.id)
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vm = vmsearch[0]
permissions_service = self.vms_service.vm_service(vm.id).permissions_service()
permissions_service.add(types.Permission(user=types.User(id=user.id), role=types.Role(name='UserVmManager')))
keys_service = user_service.ssh_public_keys_service()
publickeyfile = get_ssh_pub_key()
if publickeyfile is None:
error("neither id_rsa, id_dsa nor id_ed25519 public keys found in your .ssh directory. This is required")
return
publickeyfile = open(publickeyfile).read().rstrip()
try:
keys_service.add(key=types.SshPublicKey(content=publickeyfile))
except:
pass
command = "ssh -t -p 2222 ovirt-vmconsole@%s connect --vm-name %s" % (self.host, name)
if web:
return command
call(command, shell=True)
return
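# Note (added): serial access goes through the ovirt-vmconsole SSH proxy
# listening on port 2222 of the engine host; the public key uploaded above is
# what authorizes the connecting user.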
def dnsinfo(self, name):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
return None, None
vm = vmsearch[0]
dnsclient, domain = None, None
for description in vm.description.split(','):
desc = description.split('=')
if len(desc) == 2:
if desc[0] == 'dnsclient':
dnsclient = desc[1]
if desc[0] == 'domain':
domain = desc[1]
return dnsclient, domain
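# Note (added): metadata is kept in the VM description as comma-separated
# 'key=value' pairs, e.g. "plan=myplan,dnsclient=route53,domain=example.com"
# (hypothetical values); dnsinfo() above and info() below both parse it.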
def info(self, name, vm=None, debug=False):
conn = self.conn
minimal = False
if vm is None:
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {}
vm = vmsearch[0]
else:
minimal = True
status = str(vm.status)
yamlinfo = {'name': vm.name, 'disks': [], 'nets': [], 'status': status, 'instanceid': vm.id}
template = conn.follow_link(vm.template)
image = template.name
yamlinfo['image'] = image
yamlinfo['user'] = common.get_user(image)
for description in vm.description.split(','):
desc = description.split('=')
if len(desc) == 2:
if desc[0] == 'filter':
continue
else:
yamlinfo[desc[0]] = desc[1]
try:
if status == 'up':
host = conn.follow_link(vm.host)
yamlinfo['host'] = host.name
except:
pass
yamlinfo['memory'] = int(vm._memory / 1024 / 1024)
cpus = vm.cpu.topology.cores * vm.cpu.topology.sockets
yamlinfo['cpus'] = cpus
yamlinfo['creationdate'] = vm._creation_time.strftime("%d-%m-%Y %H:%M")
devices = self.vms_service.vm_service(vm.id).reported_devices_service().list()
ips = []
for device in devices:
if device.ips:
for ip in device.ips:
if str(ip.version) == 'v4' and ip.address not in ['172.17.0.1', '127.0.0.1']:
ips.append(ip.address)
nics = self.vms_service.vm_service(vm.id).nics_service().list()
profiles_service = self.conn.system_service().vnic_profiles_service()
if ips:
yamlinfo['ip'] = ips[0]
if minimal:
return yamlinfo
if not self.netprofiles:
self.netprofiles = {}
for profile in profiles_service.list():
self.netprofiles[profile.id] = profile.name
for nic in nics:
device = nic.name
mac = nic.mac.address
network = 'N/A'
if nic.vnic_profile is not None and nic.vnic_profile.id in self.netprofiles:
network = self.netprofiles[nic.vnic_profile.id]
network_type = str(nic.interface)
yamlinfo['nets'].append({'device': device, 'mac': mac, 'net': network, 'type': network_type})
attachments = self.vms_service.vm_service(vm.id).disk_attachments_service().list()
for attachment in attachments:
disk = conn.follow_link(attachment.disk)
storagedomain = conn.follow_link(disk.storage_domains[0]).name if disk.storage_domains else ''
device = disk.name
disksize = int(disk.provisioned_size / 2**30)
diskformat = str(disk.format)
drivertype = str(disk.content_type)
path = disk.id
yamlinfo['disks'].append({'device': device, 'size': disksize, 'format': diskformat, 'type': drivertype,
'path': "%s/%s" % (storagedomain, path)})
if image is None and 'kubetype' in yamlinfo and yamlinfo['kubetype'] == 'openshift':
yamlinfo['user'] = 'core'
cdroms_service = self.vms_service.vm_service(vm.id).cdroms_service()
cdroms = cdroms_service.list()
if cdroms:
cdrom = cdroms[0]
if cdrom.file is not None:
iso = cdrom.file.id
disks_service = self.conn.system_service().disks_service()
disksearch = disks_service.list(search=f'id={iso}')
if disksearch:
yamlinfo['iso'] = disksearch[0].name
if debug:
yamlinfo['debug'] = vars(vm)
return yamlinfo
def ip(self, name):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return None
vm = vmsearch[0]
ips = []
devices = self.vms_service.vm_service(vm.id).reported_devices_service().list()
for device in devices:
if device.ips:
for i in device.ips:
if str(i.version) == 'v4' and i.address not in ['172.17.0.1', '127.0.0.1']:
ips.append(i.address)
if not ips:
return None
else:
return ips[-1]
def volumes(self, iso=False):
if iso:
isos = []
for pool in self.conn.system_service().storage_domains_service().list():
sd_service = self.sds_service.storage_domain_service(pool.id)
if str(pool.type) == 'iso':
file_service = sd_service.files_service()
for isofile in file_service.list():
isos.append(isofile._name)
else:
disks_service = sd_service.disks_service()
for disk in disks_service.list():
if disk.name.endswith('.iso'):
isos.append(disk.name)
return isos
else:
images = []
templates_service = self.templates_service
templateslist = templates_service.list()
for template in templateslist:
if template.name != 'Blank':
images.append(template.name)
return images
def delete(self, name, snapshots=False):
vmsearch = self.vms_service.list(search='name=%s' % name)
if not vmsearch:
error("VM %s not found" % name)
return {'result': 'failure', 'reason': "VM %s not found" % name}
vminfo = vmsearch[0]
vm = self.vms_service.vm_service(vminfo.id)
if str(vminfo.status) not in
# Read chunks already present in a PNG image
reader = png.Reader(filename=image_path)
chunks = list(reader.chunks())
# Insert custom framerate chunk
chunks.insert(1, (FPS_CHUNK_CODE, struct.pack("f", framerate)))
# Write it into the image
with open(image_path, 'wb') as image_file:
png.write_chunks(image_file, chunks)
return True
def readFramerateChunk(self, image_path):
"""
Method to read a framerate inserted as one of a PNG image's chunks.
This method tries to import pyPNG first. If the import fails, a notice about
installing it is shown and -1.0 is returned. Otherwise, all chunks currently
in the PNG image at `image_path` are extracted. The framerate chunk ought to
be stored as the second chunk, right behind IHDR. If that chunk's code type
matches, its value is returned.
Args:
image_path: A str containing a path to an image with the framerate chunk.
Returns:
A float signifying framerate, or -1.0 if something failed.
"""
# import or install pyPNG
try:
import png
except ModuleNotFoundError:
self.installPyPNGNotice()
return -1.0
# Read chunks already present in a PNG image
reader = png.Reader(filename=image_path)
chunks = list(reader.chunks())
if chunks[1][0] == FPS_CHUNK_CODE:
return struct.unpack("f", chunks[1][1])[0]
else:
FreeCAD.Console.PrintError("Unable to unpack a framerate.\n")
return -1.0
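# Round-trip sanity check for the packing used above (added sketch,
# independent of pyPNG): struct.pack("f", fps) stores an IEEE-754 float32,
# so unpacking recovers the framerate up to float32 precision:
# import struct
# assert abs(struct.unpack("f", struct.pack("f", 23.976))[0] - 23.976) < 1e-3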
class ControlProxy:
"""
Proxy class for a `DocumentObjectGroupPython` Control instance.
A ControlProxy instance adds properties to a `DocumentObjectGroupPython`
Control instance and responds to their changes. It provides a control panel
to control animations.
To access such a dialog double-click Control in Tree View or right click and
select *Show control panel* option from a context menu.
Attributes:
updated: A bool - True if a property was changed by a class and not user.
temporary_export_path: A str path to an export folder.
To connect this `Proxy` object to a `DocumentObjectGroupPython` Control do:
a = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroupPython",
"Control")
ControlProxy(a)
"""
updated = False
def __init__(self, fp):
"""
Initialization method for ControlProxy.
A class instance is created and made a `Proxy` for a generic
`DocumentObjectGroupPython` Control object. During initialization a number of
properties are specified and preset.
Args:
fp: A barebone `DocumentObjectGroupPython` Control object to be extended.
"""
self.setProperties(fp)
fp.Proxy = self
def onDocumentRestored(self, fp):
"""
Method called when document is restored to make sure everything is as it was.
Reinitialization method - it creates properties and sets them to default, if
they were not restored automatically. Properties of connected `ViewObject` are
also recreated and reset if necessary.
Args:
fp : A restored `DocumentObjectGroupPython` Control object.
"""
fp.ViewObject.Proxy.setProperties(fp.ViewObject)
self.setProperties(fp)
def onBeforeChange(self, fp, prop):
"""
Method called before `DocumentObjectGroupPython` Control is changed.
An old export path is stored for a case in which a new export path is not
a valid path.
Args:
fp : A `DocumentObjectGroupPython` Control object.
prop: A str name of a property about to change.
"""
# Save an export path before it's changed to restore it if new
# path is invalid
if prop == "ExportPath" and hasattr(fp, "ExportPath") and \
not self.updated:
self.temporary_export_path = fp.ExportPath
def onChanged(self, fp, prop):
"""
Method called after `DocumentObjectGroupPython` Control was changed.
Values of changed properties (start time, step time, stop time, export path)
are checked for validity and edited if they are not.
Args:
fp : A `DocumentObjectGroupPython` Control object.
prop: A str name of a changed property.
"""
# Don't do anything if a value was updated because another property
# had changed
if self.updated:
self.updated = False
return
# Control animation range so that step size is less than range size
elif prop == "StartTime" and hasattr(fp, "StopTime") and \
hasattr(fp, "StepTime"):
self.updated = True
fp.StopTime = (fp.StopTime, fp.StartTime + fp.StepTime,
float("inf"), 0.5)
self.updated = True
fp.StepTime = (fp.StepTime, 0.01, fp.StopTime - fp.StartTime, 0.1)
elif prop == "StepTime" and hasattr(fp, "StartTime") and \
hasattr(fp, "StopTime"):
self.updated = True
fp.StopTime = (fp.StopTime, fp.StartTime + fp.StepTime,
float("inf"), 0.5)
self.updated = True
fp.StartTime = (fp.StartTime, -float("inf"),
fp.StopTime - fp.StepTime, 0.5)
elif prop == "StopTime" and hasattr(fp, "StartTime") and \
hasattr(fp, "StepTime"):
self.updated = True
fp.StartTime = (fp.StartTime, -float("inf"),
fp.StopTime - fp.StepTime, 0.5)
self.updated = True
fp.StepTime = (fp.StepTime, 0.01, fp.StopTime - fp.StartTime, 0.1)
# Return to previous export path if the new one is invalid
elif prop == "ExportPath":
# Test access rights in the folder and show a warning if they are
# not sufficient
if not os.access(fp.ExportPath, os.W_OK | os.R_OK):
QMessageBox.warning(None, 'Error while setting Export Path',
"You don't have access to read and write "
+ "in this folder.")
self.updated = True
fp.ExportPath = self.temporary_export_path
del self.temporary_export_path
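# Note (added): the assignments above use FreeCAD's quadruple syntax for
# constrained properties, (value, minimum, maximum, step), so changing one of
# StartTime/StepTime/StopTime re-tightens the allowed range of the other two.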
def setProperties(self, fp):
"""
Method to set properties during initialization or document restoration.
The properties are set if they are not already present and an
`AnimateDocumentObserver` is recreated.
Args:
fp : A restored or barebone `DocumentObjectGroupPython` Control object.
"""
# Add (and preset) properties
if not hasattr(fp, "StartTime"):
fp.addProperty(
"App::PropertyFloatConstraint", "StartTime", "Timing",
"Animation start time. \nRange is "
"< - inf | Stop Time - Step Time >."
).StartTime = (0, -float("inf"), 9.5, 0.5)
elif hasattr(fp, "StepTime") and hasattr(fp, "StopTime"):
fp.StartTime = (fp.StartTime, -float("inf"),
fp.StopTime - fp.StepTime, 0.5)
if not hasattr(fp, "StepTime"):
fp.addProperty(
"App::PropertyFloatConstraint", "StepTime", "Timing",
"Animation step time. \nRange is "
"< 0.01 | Stop Time - Start Time >."
).StepTime = (0.5, 0.01, 10, 0.1)
elif hasattr(fp, "StartTime") and hasattr(fp, "StopTime"):
fp.StepTime = (fp.StepTime, 0.01, fp.StopTime - fp.StartTime, 0.1)
if not hasattr(fp, "StopTime"):
fp.addProperty(
"App::PropertyFloatConstraint", "StopTime", "Timing",
"Animation stop time. \nRange is "
+ "< Start Time + Step Time | inf >."
).StopTime = (10, 0.5, float("inf"), 0.5)
elif hasattr(fp, "StartTime") and hasattr(fp, "StepTime"):
fp.StopTime = (fp.StopTime, fp.StartTime + fp.StepTime,
float("inf"), 0.5)
if not hasattr(fp, "ExportPath"):
fp.addProperty(
"App::PropertyPath", "ExportPath", "Record & Export",
"Path to a folder, where recorded rendered images will be "
"saved to be converted into a video.")
if not hasattr(fp, "VideoWidth"):
fp.addProperty(
"App::PropertyIntegerConstraint", "VideoWidth",
"Record & Export", "Width of the exported video in pixels.\n"
+ "Range is < 32 | 7680 >.").VideoWidth = (1280, 32, 7680, 10)
else:
fp.VideoWidth = (fp.VideoWidth, 32, 7680, 10)
if not hasattr(fp, "VideoHeight"):
fp.addProperty(
"App::PropertyIntegerConstraint", "VideoHeight",
"Record & Export", "Height of the exported video in pixels.\n"
+ "Range is < 32 | 4320 >.").VideoHeight = (720, 32, 4320, 10)
else:
fp.VideoHeight = (fp.VideoHeight, 32, 4320, 10)
# Add a document observer to control the structure
import AnimateDocumentObserver
AnimateDocumentObserver.addObserver()
class ViewProviderControlProxy:
"""
Proxy class for `Gui.ViewProviderDocumentObject` Control.ViewObject.
A ViewProviderControlProxy instance provides a Control's icon, double-click
response and context menu with "Show control panel".
Attributes:
fp: A Control object.
panel: A ControlPanel if one is active or None.
To connect this `Proxy` object to a `Gui.ViewProviderDocumentObject`
Control.ViewObject do:
a = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroupPython",
"Control")
ViewProviderControlProxy(a.ViewObject)
"""
panel = None
fp = None
def __init__(self, vp):
"""
Initialization method for ViewProviderControlProxy.
A class instance is created and made a `Proxy` for a generic
`Gui.ViewProviderDocumentObject` Control.ViewObject. During initialization
a number of properties are specified and preset.
Args:
vp: A barebone `Gui.ViewProviderDocumentObject` Control.ViewObject.
"""
self.setProperties(vp)
vp.Proxy = self
def attach(self, vp):
"""
Method called by FreeCAD after initialization.
This method adds Control as the `fp` attribute.
Args:
vp: A Control.ViewObject after initialization.
"""
# Add feature python as it's necessary to claimChildren
self.fp = vp.Object
def claimChildren(self):
"""
Method called by FreeCAD to retrieve assigned children.
When a property of a Control is touched the Control and the FreeCAD
ActiveDocument are notified. The FreeCAD ActiveDocument then emits a signal
to inform all its observers e.g. the FreeCADGui ActiveDocument. The FreeCADGui
document then emits a new signal to inform e.g. the tree view. The tree view
then invokes `claimChildren()`.
"""
if hasattr(self, "fp"):
if self.fp:
return self.fp.Group
return []
def canDropObject(self, obj):
"""
Method called by FreeCAD to ask if an object `obj` can be dropped into a Group.
FreeCAD objects of a Server, Trajectory and CollisionDetector type are allowed
to drop inside a Control group.
Args:
obj: A FreeCAD object hovering above a Control item in the Tree View.
"""
# Allow only some objects to be dropped into the Control group
if hasattr(obj, "Proxy") and obj.Proxy.__class__.__name__ in (
"ServerProxy", "TrajectoryProxy", "CollisionDetectorProxy",
"RobWorldProxy", "RobRotationProxy", "RobTranslationProxy"):
return True
return False
def getIcon(self):
"""
Method called by FreeCAD to supply an icon for the Tree View.
A full path to an icon is supplied for the FreeCADGui.
Returns:
A str path to an icon.
"""
return path.join(PATH_TO_ICONS, "Control.png")
editor.StyleSetForeground() for style 4\n'
'called editor.StyleSetItalic() for style 4\n'
'called editor.StyleSetForeground() for style 5\n'
'called editor.StyleSetItalic() for style 5\n'
'called editor.StyleSetForeground() for style 6\n'
'called editor.StyleSetBold() for style 6\n'
'called editor.StyleSetForeground() for style 7\n'
'called editor.StyleSetBold() for style 7\n'
'called editor.StyleSetForeground() for style 8\n'
'called editor.StyleSetBold() for style 8\n'
'called editor.StyleSetForeground() for style 9\n'
'called editor.StyleSetBold() for style 9\n'
'called editor.StyleSetForeground() for style 10\n'
'called editor.StyleSetBold() for style 10\n'
'called editor.StyleSetForeground() for style 11\n'
'called editor.StyleSetBold() for style 11\n'
'called editor.StyleSetBackground() for style 12\n'
'called editor.StyleSetForeground() for style 12\n'
'called editor.StyleSetForeground() for style 13\n'
'called editor.StyleSetForeground() for style 14\n'
'called editor.StyleSetForeground() for style 15\n'
'called editor.StyleSetBackground() for style 16\n'
'called editor.StyleSetForeground() for style 16\n'
'called editor.StyleSetForeground() for style 17\n'
'called editor.StyleSetBold() for style 17\n'
'called editor.StyleSetForeground() for style 18\n'
'called editor.StyleSetUnderline() for style 11\n'
'called editor.StyleSetBackground() for style 19\n'
'called editor.StyleSetForeground() for style 19\n'
'called editor.StyleSetBackground() for style 20\n'
'called editor.StyleSetForeground() for style 20\n'
'called editor.StyleSetBackground() for style 21\n'
'called editor.StyleSetForeground() for style 21\n'
"called editor.Bind() for method"
" MainWindow.OnEvtText\n")
def setup_text(self, monkeypatch, capsys):
"wordt uitgevoerd als onderdeel van setup_editor, dus geen aparte test nodig"
def test_create_menu(self, monkeypatch, capsys):
"test for initial creation of menubar"
def mock_getmenubar(*args):
return MockMenuBar()
def mock_get_menudata(*args):
self = args[0]
return (('other', ((_('m_forward'), self.callback, 'forward', 'Ctrl+PgDown'),
(_('m_back'), self.callback, 'back', 'Ctrl+PgUp,F2'),
('other', self.callback, 'other', 'Ctrl+D,Delete'), ), ),
(_("m_view"), ((_("m_revorder"), self.callback, _("h_revorder"), 'F9'),
("", None, None, None),
(_("m_selall"), self.callback, _("h_selall"), None),
(_("m_seltag"), self.callback, _("h_seltag"), None),
(_("m_seltxt"), self.callback, _("h_seltxt"), None), ), ), )
def mock_set_accel(*args):
print('called mainwindow.SetAcceleratorTable()')
monkeypatch.setattr(MockNoteTree, 'get_menudata', mock_get_menudata)
monkeypatch.setattr(gui.wx, 'Menu', MockMenu)
monkeypatch.setattr(gui.wx, 'MenuItem', MockMenuItem)
monkeypatch.setattr(gui.wx, 'AcceleratorEntry', MockAcceleratorEntry)
monkeypatch.setattr(gui.wx, 'AcceleratorTable', MockAcceleratorTable)
testobj = setup_mainwindow(monkeypatch)
testobj.base.opts['RevOrder'] = True
testobj.base.opts['Selection'] = (1, True)
testobj.tree = MockTree()
monkeypatch.setattr(MockMenuBar, 'GetMenus', lambda x: [])
monkeypatch.setattr(testobj, 'GetMenuBar', mock_getmenubar)
monkeypatch.setattr(testobj, 'SetAcceleratorTable', mock_set_accel)
testobj.create_menu()
assert list(testobj.selactions.keys()) == ["m_revorder", "m_selall", "m_seltag", "m_seltxt"]
assert testobj.seltypes == ["m_selall", "m_seltag", "m_seltxt"]
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called MenuBar.__init__()\n'
'called Menu.__init__()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called AcceleratorTable.__init__()\n'
'called tree.SetAcceleratorTable()\n'
'called menubar.Append()\n'
'called Menu.__init__()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menubar.Append()\n'
'called AcceleratorTable.__init__()\n'
'called mainwindow.SetAcceleratorTable()\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`True`)\n'
'called menuitem.Check(`True`)\n')
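# The tests in this class share one pattern (added note): wx classes are
# monkeypatched with mocks whose methods print a line per call, and each
# assertion compares the stdout captured by capsys against the expected
# call sequence.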
def test_create_menu_2(self, monkeypatch, capsys):
"test for recreation of menubar"
def mock_getmenubar(*args):
return MockMenuBar()
def mock_get_menudata(*args):
self = args[0]
return ( ('other', ((_('m_forward'), self.callback, 'forward', 'Ctrl+PgDown'),
(_('m_back'), self.callback, 'back', 'Ctrl+PgUp,F2'),
('other', self.callback, 'other', 'Ctrl+D,Delete'), ), ),
(_("m_view"), ((_("m_revorder"), self.callback, _("h_revorder"), 'F9'),
("", None, None, None),
(_("m_selall"), self.callback, _("h_selall"), None),
(_("m_seltag"), self.callback, _("h_seltag"), None),
(_("m_seltxt"), self.callback, _("h_seltxt"), None), ), ), )
def mock_set_accel(*args):
print('called mainwindow.SetAcceleratorTable()')
monkeypatch.setattr(MockNoteTree, 'get_menudata', mock_get_menudata)
monkeypatch.setattr(gui.wx, 'Menu', MockMenu)
monkeypatch.setattr(gui.wx, 'MenuItem', MockMenuItem)
monkeypatch.setattr(gui.wx, 'AcceleratorEntry', MockAcceleratorEntry)
monkeypatch.setattr(gui.wx, 'AcceleratorTable', MockAcceleratorTable)
testobj = setup_mainwindow(monkeypatch)
testobj.base.opts['RevOrder'] = True
testobj.base.opts['Selection'] = (1, True)
testobj.tree = MockTree()
monkeypatch.setattr(testobj, 'GetMenuBar', mock_getmenubar)
monkeypatch.setattr(testobj, 'SetAcceleratorTable', mock_set_accel)
testobj.create_menu()
assert list(testobj.selactions.keys()) == ["m_revorder", "m_selall", "m_seltag", "m_seltxt"]
assert testobj.seltypes == ["m_selall", "m_seltag", "m_seltxt"]
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called MenuBar.__init__()\n'
'called menubar.GetMenus()\n'
'called Menu.__init__()\n'
'called Menu.__init__()\n'
'called Menu.__init__()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menuitem.GetId()\n'
'called AcceleratorEntry.__init__()\n'
'called MockAcceleratorEntry.FromString()\n'
'called AcceleratorTable.__init__()\n'
'called tree.SetAcceleratorTable()\n'
'called menubar.Replace()\n'
'called menu.Destroy()\n'
'called Menu.__init__()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called MenuItem.__init__()\n'
'called menuitem.Bind()\n'
'called menu.Append()\n'
'called menubar.Replace()\n'
'called menu.Destroy()\n'
'called AcceleratorTable.__init__()\n'
'called mainwindow.SetAcceleratorTable()\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`False`)\n'
'called menuitem.Check(`True`)\n'
'called menuitem.Check(`True`)\n')
def test_OnEvtText(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.editor = MockEditor()
testobj.OnEvtText('x')
assert testobj.editor.IsModified
def OnSelChanging(self, monkeypatch, capsys):
"deze methode is wel gedfinieerd maar leeggelaten"
def test_OnSelChanged(self, monkeypatch, capsys):
def mock_check_active(*args):
print('called notetree.check_active()')
def mock_activate_item(*args):
print('called notetree.activate_item(`{}`)'.format(args[0]))
testobj = setup_mainwindow(monkeypatch)
testobj.root = 'root'
testobj.OnSelChanged(MockEvent())
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called event.__init__()\n'
'called base.check_active()\n'
'in onselchanged: item is treeitem, root is root\n'
'called base.activate_item() with arg `treeitem`\n'
'called event.Skip()\n')
def test_close(self, monkeypatch, capsys):
def mock_close(*args):
print('called mainwindow.Close()')
def mock_update(*args):
print('called notetree.update()')
testobj = setup_mainwindow(monkeypatch)
monkeypatch.setattr(testobj, 'Close', mock_close)
monkeypatch.setattr(testobj.base, 'update', mock_update)
testobj.activeitem = None
testobj.close('event')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called mainwindow.Close()\n')
testobj = setup_mainwindow(monkeypatch)
monkeypatch.setattr(testobj, 'Close', mock_close)
monkeypatch.setattr(testobj.base, 'update', mock_update)
testobj.activeitem = 'item'
testobj.close('event')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called notetree.update()\n'
'called mainwindow.Close()\n')
def test_clear_editor(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.editor = MockEditor()
testobj.clear_editor()
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockEditor.__init__()\n'
'called editor.Clear()\n'
'called editor.Enable(`False`)\n')
def test_open_editor(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.editor = MockEditor()
testobj.open_editor()
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockEditor.__init__()\n'
'called editor.Enable(`True`)\n')
def test_set_screen(self, monkeypatch, capsys):
def mock_setsize(*args):
print('called frame.SetSize({})'.format(args[0]))
testobj = setup_mainwindow(monkeypatch)
monkeypatch.setattr(testobj, 'SetSize', mock_setsize)
testobj.set_screen('screensize')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called frame.SetSize(screensize)\n')
def test_set_splitter(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.splitter = MockSplitter()
testobj.set_splitter('split')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockSplitter.__init__()\n'
'called splitter.SetSashPosition()\n')
def test_create_root(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
assert testobj.create_root('title') == testobj.root
assert testobj.activeitem == testobj.root
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.DeleteAllItems()\n'
'called tree.AddRoot()\n')
def test_set_item_expanded(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.set_item_expanded('item')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.Expand()\n')
def test_emphasize_activeitem(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.emphasize_activeitem('value')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.SetItemBold() using value\n')
def test_editor_text_was_changed(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.editor = MockEditor()
assert testobj.editor_text_was_changed() == 'ismodified'
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockEditor.__init__()\n')
def test_copy_text_from_editor_to_activeitem(self, monkeypatch, capsys):
def mock_set_itemtext(*args):
print('set text of `{}` to `{}`'.format(args[0], args[1]))
testobj = setup_mainwindow(monkeypatch)
testobj.activeitem = 'active item'
testobj.editor = MockEditor()
monkeypatch.setattr(testobj, 'set_item_text', mock_set_itemtext)
testobj.copy_text_from_editor_to_activeitem()
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockEditor.__init__()\n'
'set text of `active item` to `fake editor value`\n')
def test_copy_text_from_activeitem_to_editor(self, monkeypatch, capsys):
def mock_get_itemtext(*args):
return 'item text'
testobj = setup_mainwindow(monkeypatch)
testobj.activeitem = 'active item'
testobj.editor = MockEditor()
monkeypatch.setattr(testobj, 'get_item_text', mock_get_itemtext)
testobj.copy_text_from_activeitem_to_editor()
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockEditor.__init__()\n'
'setting editor text to `item text`\n')
def test_select_item(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.select_item('item')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.SelectItem()\n')
def test_get_selected_item(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
assert testobj.get_selected_item() == 'selected_item'
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
)
def test_remove_item_from_tree(self, monkeypatch, capsys):
"test for removing any item except last one"
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.remove_item_from_tree('item')
assert testobj.activeitem is None
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.GetNextSibling()\n'
'called MockTreeItem.__init__()\n'
'called tree.Delete()\n')
def test_remove_item_from_tree_2(self, monkeypatch, capsys):
"test for removing last item"
def mock_get_next(self, *args):
print('called tree.GetNextSibling()')
return MockTreeItem('not ok')
testobj = setup_mainwindow(monkeypatch)
monkeypatch.setattr(MockTree, 'GetNextSibling', mock_get_next)
testobj.tree = MockTree()
testobj.remove_item_from_tree('item')
assert testobj.activeitem is None
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.GetNextSibling()\n'
'called MockTreeItem.__init__()\n'
'called tree.GetPrevSibling()\n'
'called MockTreeItem.__init__()\n'
'called tree.Delete()\n'
'called tree.SelectItem()\n')
def test_get_key_from_item(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
assert testobj.get_key_from_item('item') == 'itemkey'
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n')
def test_get_activeitem_title(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
assert testobj.get_activeitem_title() == 'itemtext'
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
)
def test_set_activeitem_title(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.set_activeitem_title('title')
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called app.__init__()\n'
'called MockTree.__init__()\n'
'called tree.SetItemText()\n')
def test_set_focus_to_tree(self, monkeypatch, capsys):
testobj = setup_mainwindow(monkeypatch)
testobj.tree = MockTree()
testobj.set_focus_to_tree()
assert capsys.readouterr().out == ('called MockNoteTree.__init__()\n'
'called
# ceridwen/combinators
'''Parser combinators for Python 2, based on the grammar in the Python
documentation. Call the parse method of single_input, file_input, and
eval_input.'''
import argparse
import pprint
import token
import tokenize
import types
import continuation_gll_combinators as combinators
class Token(combinators.Terminal):
def __init__(self, token, **kws):
vars(self)['token'] = token
vars(self)['combinators'] = (self,)
def _parse(self, trampoline, success, failure, stream, index):
if len(stream) - index <= 0:
return failure('Unexpected end of stream (expected %r)' % self.token, index)
elif stream[index][0] != self.token:
return failure('Expected %r got %r' % (self.token, stream[index]), index)
else:
result = stream[index]
index += 1
return success(result, failure, index)
def __str__(self):
return 'Token(%s)' % self.token
__repr__ = __str__
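# Usage sketch (added; assumes the token stream is a sequence of
# (name, string, ...) tuples): with
# stream = [('NAME', 'x'), ('OP', '='), ('NUMBER', '1')]
# TOKENS['NAME'] matches the first element and consumes exactly one token.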
class KeywordOrOp(Token):
def __init__(self, token, string, **kws):
super(KeywordOrOp, self).__init__(token, **kws)
vars(self)['string'] = string
def _parse(self, trampoline, success, failure, stream, index):
# Because this is in continuation-passing style, there's not
# really a good way to reuse the code from Token._parse().
if len(stream) - index <= 0:
return failure('Unexpected end of stream (expected %r)' % self.token, index)
elif stream[index][0] != self.token or stream[index][1] != self.string:
return failure('Expected (%r, %r) got %r' % (self.token, self.string, stream[index]), index)
else:
result = stream[index]
index += 1
return success(result, failure, index)
def __str__(self):
return 'KeywordOrOp(%s, %s)' % (self.token, self.string)
__repr__ = __str__
# The names of all the tokens are taken from the token.tok_name
# dictionary. Note that Token(op) is never used. As the tokenize
# documentation states, "To simplify token stream handling, all
# Operators and Delimiters tokens are returned using the generic
# token.OP token type." (It certainly doesn't simplify parsing.)
# Keywords also always have token.NAME as their first value. The
# lists of lists of keywords, delimiters, and operators are taken from
# https://docs.python.org/2/reference/lexical_analysis.html
TOKENS = types.MappingProxyType({name: Token(name) for name in token.tok_name.values()})
KEYWORDS = types.MappingProxyType({keyword: KeywordOrOp('NAME', keyword) for keyword in ('and', 'del', 'from', 'not', 'while', 'as', 'elif', 'global', 'or', 'with', 'assert', 'else', 'if', 'pass', 'yield', 'break', 'except', 'import', 'print', 'class', 'exec', 'in', 'raise', 'continue', 'finally', 'is', 'return', 'def', 'for', 'lambda', 'try')})
OPS = types.MappingProxyType({op: KeywordOrOp('OP', op) for op in ('+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&', '|', '^', '~', '<', '>', '<=', '>=', '==', '!=', '<>', '(', ')', '[', ']', '{', '}', '@', ',', ':', '.', '`', '=', ';', '+=', '-=', '*=', '/=', '//=', '%=', '&=', '|=', '^=', '>>=', '<<=', '**=')})
class Succeed(combinators.Combinator):
def __init__(self, **kws):
pass
def _parse(self, trampoline, success, failure, stream, index):
return success('', failure, index)
def star(combinator):
_ = Succeed() | (combinator + combinators.Lazy(lambda: _))
return _
def plus(combinator):
_ = combinator + (combinators.Lazy(lambda: _) | Succeed())
return _
def option(combinator):
return (combinator | Succeed())
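# For example (added note), star(TOKENS['NAME']) expands to
# Succeed() | NAME + star(NAME); Lazy defers the self-reference so the
# alternation can be built before `_` is bound, and the Succeed() branch
# lets it match zero occurrences.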
def ops_alternation(*strings):
return combinators.Alternation(*[OPS[string] for string in strings])
def max_results(results):
max_results = [combinators.Success('', '', -1)]
for result in results:
if result.index > max_results[0].index:
max_results = [result]
elif result.index == max_results[0].index:
max_results.append(result)
return max_results
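# Note (added): max_results keeps only the Success results that consumed the
# most input, preferring the longest parse when alternatives are ambiguous;
# ties are all retained.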
# tokenize generates a different token for newlines that occur after
# code and newlines on blank lines, but the grammar doesn't take
# account of the difference, so this combinator represents both.
# Comments may always optionally occur before newlines, but aren't
# included in the grammar.
newline = option(TOKENS['COMMENT']) + (TOKENS['NEWLINE'] | TOKENS['NL'])
# The grammar is taken from
# https://docs.python.org/2/reference/grammar.html . The order is
# changed because Python's eager evaluation means that lower-level
# entries have to occur first.
# comp_op = OPS['<'] | OPS['>'] | OPS['=='] | OPS['>='] | OPS['<='] | OPS['<>'] | OPS['!='] | KEYWORDS['in'] | KEYWORDS['not'] + KEYWORDS['in'] | KEYWORDS['is'] | KEYWORDS['is'] + KEYWORDS['not']
single_input = newline | combinators.Lazy(lambda: simple_stmt) | combinators.Lazy(lambda: compound_stmt + newline)
file_input = star(newline | combinators.Lazy(lambda: stmt)) + TOKENS['ENDMARKER']
eval_input = combinators.Lazy(lambda: testlist) + star(newline) + TOKENS['ENDMARKER']
augassign = ops_alternation('+=', '-=', '*=', '/=', '%=', '&=', '|=',
'^=', '<<=', '>>=', '**=', '//=')
expr_stmt = combinators.Lazy(lambda: testlist) + (augassign + (combinators.Lazy(lambda: yield_expr) | combinators.Lazy(lambda: testlist)) | star(OPS['='] + (combinators.Lazy(lambda: yield_expr) | combinators.Lazy(lambda: testlist))))
print_stmt = KEYWORDS['print'] + (option(combinators.Lazy(lambda: test) + star(OPS[','] + combinators.Lazy(lambda: test)) + option(OPS[','])) | OPS['>>'] + combinators.Lazy(lambda: test) + option(plus(OPS[','] + combinators.Lazy(lambda: test)) + option(OPS[','])))
del_stmt = KEYWORDS['del'] + combinators.Lazy(lambda: exprlist)
pass_stmt = KEYWORDS['pass']
break_stmt = KEYWORDS['break']
continue_stmt = KEYWORDS['continue']
return_stmt = KEYWORDS['return'] + option(combinators.Lazy(lambda: testlist))
yield_expr = KEYWORDS['yield'] + option(combinators.Lazy(lambda: testlist))
yield_stmt = yield_expr
raise_stmt = KEYWORDS['raise'] + option(combinators.Lazy(lambda: test) + option(OPS[','] + combinators.Lazy(lambda: test) + option(OPS[','] + combinators.Lazy(lambda: test))))
flow_stmt = break_stmt | continue_stmt | return_stmt | yield_stmt
dotted_name = TOKENS['NAME'] + star(OPS['.'] + TOKENS['NAME'])
dotted_as_name = dotted_name + option(KEYWORDS['as'] + TOKENS['NAME'])
dotted_as_names = dotted_as_name + star(OPS[','] + dotted_as_name)
decorator = OPS['@'] + dotted_name + option(OPS['('] + option(combinators.Lazy(lambda: arglist)) + OPS[')']) + newline
decorators = plus(decorator)
decorated = decorators + (combinators.Lazy(lambda: classdef) | combinators.Lazy(lambda: funcdef))
parameters = OPS['('] + option(combinators.Lazy(lambda: varargslist)) + OPS[')']
funcdef = KEYWORDS['def'] + TOKENS['NAME'] + parameters + OPS[':'] + combinators.Lazy(lambda: suite)
fpdef = TOKENS['NAME'] | OPS['('] + combinators.Lazy(lambda: fplist) + OPS[')']
fplist = fpdef + star(OPS[','] + fpdef) + option(OPS[','])
varargslist = (star(fpdef + option(OPS['='] + combinators.Lazy(lambda: test)) + OPS[',']) +
(OPS['*'] + TOKENS['NAME'] + option(OPS[','] + OPS['**'] + TOKENS['NAME']) | OPS['**'] + TOKENS['NAME'] |
fpdef + option(OPS['='] + combinators.Lazy(lambda: test)) + star(OPS[','] + fpdef + option(OPS['='] + combinators.Lazy(lambda: test))) + option(OPS[','])))
import_name = KEYWORDS['import'] + dotted_as_names
import_as_name = TOKENS['NAME'] + option(KEYWORDS['as'] + TOKENS['NAME'])
import_as_names = import_as_name + star(OPS[','] + import_as_name) + option(OPS[','])
import_from = (KEYWORDS['from'] + (star(OPS['.']) + dotted_name | plus(OPS['.'])) +
KEYWORDS['import'] + (OPS['*'] | OPS['('] + import_as_names + OPS[')'] | import_as_names))
import_stmt = import_name | import_from
global_stmt = KEYWORDS['global'] + TOKENS['NAME'] + star(OPS[','] + TOKENS['NAME'])
exec_stmt = KEYWORDS['exec'] + combinators.Lazy(lambda: expr) + option(KEYWORDS['in'] + combinators.Lazy(lambda: test) + option(OPS[','] + combinators.Lazy(lambda: test)))
assert_stmt = KEYWORDS['assert'] + combinators.Lazy(lambda: test) + option(OPS[','] + combinators.Lazy(lambda: test))
small_stmt = expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt | import_stmt | global_stmt | exec_stmt | assert_stmt
simple_stmt = small_stmt + star(OPS[';'] + small_stmt) + option(OPS[';']) + newline
# The grammar is wrong here because there can be multiple newlines
# (including comments) in a suite.
suite = simple_stmt | plus(newline) + TOKENS['INDENT'] + plus(combinators.Lazy(lambda: stmt)) + TOKENS['DEDENT']
if_stmt = KEYWORDS['if'] + combinators.Lazy(lambda: test) + OPS[':'] + suite + star(KEYWORDS['elif'] + combinators.Lazy(lambda: test) + OPS[':'] + suite) + option(KEYWORDS['else'] + OPS[':'] + suite)
while_stmt = KEYWORDS['while'] + combinators.Lazy(lambda: test) + OPS[':'] + suite + option(KEYWORDS['else'] + OPS[':'] + suite)
for_stmt = KEYWORDS['for'] + combinators.Lazy(lambda: exprlist) + KEYWORDS['in'] + combinators.Lazy(lambda: testlist) + OPS[':'] + suite + option(KEYWORDS['else'] + OPS[':'] + suite)
except_clause = KEYWORDS['except'] + option(combinators.Lazy(lambda: test) + option((KEYWORDS['as'] | OPS[',']) + combinators.Lazy(lambda: test)))
try_stmt = (KEYWORDS['try'] + OPS[':'] + suite +
((plus(except_clause + OPS[':'] + suite)) +
option(KEYWORDS['else'] + OPS[':'] + suite) +
option(KEYWORDS['finally'] + OPS[':'] + suite) |
KEYWORDS['finally'] + OPS[':'] + suite))
with_item = combinators.Lazy(lambda: test) + option(KEYWORDS['as'] + combinators.Lazy(lambda: expr))
with_stmt = KEYWORDS['with'] + with_item + star(OPS[','] + with_item) + OPS[':'] + suite
compound_stmt = if_stmt | while_stmt | for_stmt | try_stmt | with_stmt | funcdef | combinators.Lazy(lambda: classdef) | decorated
stmt = simple_stmt | compound_stmt
old_test = combinators.Lazy(lambda: or_test) | combinators.Lazy(lambda: old_lambdef)
testlist_safe = old_test + option(plus(OPS[','] + old_test) + option(OPS[',']))
old_lambdef = KEYWORDS['lambda'] + option(varargslist) + OPS[':'] + old_test
comp_if = KEYWORDS['if'] + old_test + option(combinators.Lazy(lambda: comp_iter))
comp_for = KEYWORDS['for'] + combinators.Lazy(lambda: exprlist) + KEYWORDS['in'] + combinators.Lazy(lambda: or_test) + option(combinators.Lazy(lambda: comp_iter))
comp_iter = comp_for | comp_if
list_if = KEYWORDS['if'] + old_test + option(combinators.Lazy(lambda: list_iter))
list_for = KEYWORDS['for'] + combinators.Lazy(lambda: exprlist) + KEYWORDS['in'] + testlist_safe + option(combinators.Lazy(lambda: list_iter))
list_iter = list_for | list_if
argument = combinators.Lazy(lambda: test) + option(comp_for) | combinators.Lazy(lambda: test) + OPS['='] + combinators.Lazy(lambda: test)
arglist = star(argument + OPS[',']) + (argument + option(OPS[',']) | OPS['*'] + combinators.Lazy(lambda: test) + star(OPS[','] + argument) + option(OPS[','] + OPS['**'] + combinators.Lazy(lambda: test)) | OPS['**'] + combinators.Lazy(lambda: test))
classdef = KEYWORDS['class'] + TOKENS['NAME'] + option(OPS['('] + option(combinators.Lazy(lambda: testlist)) + OPS[')']) + OPS[':'] + suite
dictorsetmaker = (combinators.Lazy(lambda: test) + OPS[':'] + combinators.Lazy(lambda: test) + (comp_for | (star(OPS[','] + combinators.Lazy(lambda: test) + OPS[':'] + combinators.Lazy(lambda: test)) + option(OPS[','])))) | (combinators.Lazy(lambda: test) + (comp_for | (star(OPS[','] + combinators.Lazy(lambda: test)) + option(OPS[',']))))
testlist = combinators.Lazy(lambda: test) + star(OPS[','] + combinators.Lazy(lambda: test)) + option(OPS[','])
exprlist = combinators.Lazy(lambda: expr) + star(OPS[','] + combinators.Lazy(lambda: expr)) + option(OPS[','])
sliceop = OPS[':'] + option(combinators.Lazy(lambda: test))
subscript = (OPS['.'] + OPS['.'] + OPS['.']) | combinators.Lazy(lambda: test) | option(combinators.Lazy(lambda: test)) + OPS[':'] + option(combinators.Lazy(lambda: test)) + option(sliceop)
subscriptlist = subscript + star(OPS[','] + subscript) + option(OPS[','])
trailer = (OPS['('] + option(arglist) + OPS[')']) | (OPS['['] + subscriptlist + OPS[']']) | (OPS['.'] + TOKENS['NAME'])
lambdef = KEYWORDS['lambda'] + option(varargslist) + OPS[':'] + combinators.Lazy(lambda: test)
testlist_comp = combinators.Lazy(lambda: test) + (comp_for | star(OPS[','] + combinators.Lazy(lambda: test)) + option(OPS[',']))
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the PyMVPA package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
"""Little statistics helper"""
__docformat__ = 'restructuredtext'
from mvpa2.base import externals
if externals.exists('scipy', raise_=True):
import scipy.stats as st
# evaluate once the fact of life
__scipy_prior0101 = externals.versions['scipy'] < '0.10.1'
import numpy as np
import copy
def chisquare(obs, exp='uniform'):
"""Compute the chisquare value of a contingency table with arbitrary
dimensions.
Parameters
----------
obs : array
Observations matrix
exp : ('uniform', 'indep_rows', 'indep_cols') or array, optional
Matrix of expected values of the same size as `obs`. If no
array is given, 'uniform' distributes all observations evenly
across the cells, 'indep_rows' derives the expected frequencies
from the column marginals (so if the contingency table is
predictions vs targets, it accounts for imbalance among the
targets), and 'indep_cols' does the same from the row marginals.
Although 'uniform' is the default, 'indep_rows' is preferable
for confusion matrices.
Returns
-------
tuple
chisquare-stats, associated p-value (upper tail)
"""
obs = np.array(obs)
# get total number of observations
nobs = np.sum(obs)
# if no expected values are supplied, assume equal distribution
if not isinstance(exp, np.ndarray):
ones = np.ones(obs.shape, dtype=float)
if exp == 'indep_rows':
# multiply each column
exp = np.sum(obs, axis=0)[None, :] * ones / obs.shape[0]
elif exp == 'indep_cols':
# multiply each row
exp = np.sum(obs, axis=1)[:, None] * ones / obs.shape[1]
elif exp == 'uniform':
# just evenly distribute
exp = nobs * np.ones(obs.shape, dtype=float) / np.prod(obs.shape)
else:
raise ValueError("Unknown specification of expected values exp=%r"
% (exp,))
else:
assert(exp.shape == obs.shape)
# make sure to have floating point data
exp = exp.astype(float)
# compute chisquare value
exp_zeros = exp == 0
exp_nonzeros = np.logical_not(exp_zeros)
if np.sum(exp_zeros) != 0 and (obs[exp_zeros] != 0).any():
raise ValueError("chisquare: Expected values have 0-values, but there"
" are actual observations -- chi^2 cannot be computed")
chisq = np.sum(((obs - exp) ** 2)[exp_nonzeros] / exp[exp_nonzeros])
# return chisq and probability (upper tail)
# taking only the elements with something expected
return chisq, st.chisqprob(chisq, np.sum(exp_nonzeros) - 1)
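# A minimal usage sketch (hypothetical numbers): for a strongly diagonal
# 2x2 confusion matrix the statistic is large and the p-value small, e.g.
#   chisq, p = chisquare([[20, 5], [4, 21]], exp='indep_rows')
# With exp='uniform' the same 50 observations would instead be compared
# against 50 / 4 = 12.5 expected observations per cell.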
class DSMatrix(object):
"""DSMatrix allows for the creation of dissilimarity matrices using
arbitrary distance metrics.
"""
# metric is a string
def __init__(self, data_vectors, metric='spearman'):
"""Initialize DSMatrix
Parameters
----------
data_vectors : ndarray
m x n collection of vectors, where m is the number of exemplars
and n is the number of features per exemplar
metric : string
Distance metric to use (e.g., 'euclidean', 'spearman', 'pearson',
'confusion')
"""
# init members
self.full_matrix = []
self.u_triangle = None
self.vector_form = None
self._u_triangle_vecs = None # vectorized versions
# this one we know straight away, so set it
self.metric = metric
# size of dataset (checking if we're dealing with a column vector only)
num_exem = np.shape(data_vectors)[0]
flag_1d = False
# changed 4/26/09 to new way of figuring out if array is 1-D
#if (isinstance(data_vectors, np.ndarray)):
if (not(num_exem == np.size(data_vectors))):
num_features = np.shape(data_vectors)[1]
else:
flag_1d = True
num_features = 1
# generate output (dissimilarity) matrix
dsmatrix = np.mat(np.zeros((num_exem, num_exem)))
if (metric == 'euclidean'):
#print 'Using Euclidean distance metric...'
# down rows
for i in range(num_exem):
# across columns
for j in range(num_exem):
if (not(flag_1d)):
dsmatrix[i, j] = np.linalg.norm(
data_vectors[i, :] - data_vectors[j, :])
else:
dsmatrix[i, j] = np.linalg.norm(
data_vectors[i] - data_vectors[j])
elif (metric == 'spearman'):
#print 'Using Spearman rank-correlation metric...'
# down rows
for i in range(num_exem):
# across columns
for j in range(num_exem):
dsmatrix[i, j] = 1 - st.spearmanr(
data_vectors[i, :], data_vectors[j, :])[0]
elif (metric == 'pearson'):
dsmatrix = np.corrcoef(data_vectors)
elif (metric == 'confusion'):
#print 'Using confusion correlation metric...'
# down rows
for i in range(num_exem):
# across columns
for j in range(num_exem):
if (not(flag_1d)):
dsmatrix[i, j] = 1 - int(
np.floor(np.sum((
data_vectors[i, :] == data_vectors[j, :]
).astype(np.int32)) / num_features))
else:
dsmatrix[i, j] = 1 - int(
data_vectors[i] == data_vectors[j])
self.full_matrix = dsmatrix
##REF: Name was automagically refactored
def get_triangle(self):
# if we need to create the u_triangle representation, do so
if (self.u_triangle is None):
self.u_triangle = np.triu(self.full_matrix)
return self.u_triangle
def get_triangle_vector_form(self, k=0):
'''
Returns values from a triangular part of the matrix in vector form
Parameters
----------
k: int
offset from diagonal. k=0 means all values from the diagonal and those
above it, k=1 all values from the cells above the diagonal, etc
Returns
-------
v: np.ndarray (vector)
array with values from the dissimilarity matrix. If the matrix is shaped
p x p and k >= 0, then v has (p-k)*(p-k+1)/2 elements; if k < 0, it has
p*p-(p+k)*(p+k-1)/2 elements.
'''
n = self.full_matrix.shape[0]
if k < -n or k > n:
raise IndexError("Require %d <= k <= %d" % (-n, n))
if self._u_triangle_vecs is None:
self._u_triangle_vecs = dict()
if k not in self._u_triangle_vecs:
msk = np.zeros((n, n), dtype=np.bool_)
for i in range(n):
for j in range(n):
if i < j + k:
break
msk[i, j] = np.True_
self._u_triangle_vecs[k] = self.full_matrix[msk]
return self._u_triangle_vecs[k]
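# Worked size example: for a 4 x 4 matrix and k=1 the mask selects the
# strict triangle, so the returned vector has (4-1)*(4-1+1)/2 = 6
# elements, one per off-diagonal pair.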
# create the dissimilarity matrix on the (upper) triangle of the two
# dissimilarity matrices; we can just reuse the same dissimilarity
# matrix code, but since it will return a matrix, we need to pick out
# either dsm[0,1] or dsm[1,0]
# note: this is a bit of a kludge right now, but it's the only way to solve
# certain problems:
# 1. Set all 0-valued elements in the original matrix to -1 (an impossible
# value for a dissimilarity matrix)
# 2. Find the upper triangle of the matrix
# 3. Create a vector from the upper triangle, but only with the
# elements whose absolute value is greater than 0 -- this
# will keep everything from the original matrix that wasn't
# part of the zero'ed-out portion when we took the upper
# triangle
# 4. Set all the -1-valued elements in the vector to 0 (their
# original value)
# 5. Cast to numpy array
##REF: Name was automagically refactored
def get_vector_form(self):
if (self.vector_form is not None):
return self.vector_form
orig_dsmatrix = copy.deepcopy(self.get_full_matrix())
orig_dsmatrix[orig_dsmatrix == 0] = -1
orig_tri = np.triu(orig_dsmatrix)
vector_form = orig_tri[abs(orig_tri) > 0]
vector_form[vector_form == -1] = 0
self.vector_form = np.asarray(vector_form)
return self.vector_form
# XXX is there any reason to have these get* methods
# instead of plain access to full_matrix and method?
##REF: Name was automagically refactored
def get_full_matrix(self):
return self.full_matrix
##REF: Name was automagically refactored
def get_metric(self):
return self.metric
def _chk_asanyarray(a, axis):
a = np.asanyarray(a)
if axis is None:
a = a.ravel()
outaxis = 0
else:
outaxis = axis
return a, outaxis
def ttest_1samp(a, popmean=0, axis=0, mask=None, alternative='two-sided'):
"""
Calculates the T-test for the mean of ONE group of scores `a`.
This is a refinement for the :func:`scipy.stats.ttest_1samp` for
the null hypothesis testing that the expected value (mean) of a
sample of independent observations is equal to the given
population mean, `popmean`. It adds the ability to carry out single-
tailed tests as well as to operate on samples with a varying number of
active measurements, as specified by the `mask` argument.
Since it is only a refinement and otherwise it should perform the
same way as the original ttest_1samp -- the name was overloaded.
Note
----
Initially it was coded before discovering scipy.mstats which
should work with masked arrays. But ATM (scipy 0.10.1) its
ttest_1samp does not support axis argument making it of limited
use anyways.
Parameters
----------
a : array_like
sample observations
popmean : float or array_like
expected value in null hypothesis; if array_like then it must have the
same shape as `a` excluding the axis dimension
axis : int, optional, (default axis=0)
Axis can equal None (ravel array first), or an integer (the axis
over which to operate on `a`).
import logging
logger = logging.getLogger(__name__)
from models import Session, Card, CardDetail, Stack
from models import CARD_KIND_WEAPON, CARD_KIND_POTION, CARD_KIND_MONSTER, CARD_KIND_SCRAP, CARD_KIND_TREASURE
import random
HEALTH_CAPACITY = 20
REQUIRED_TURNS_BEFORE_SKIPPING = 5
ROOM_CAPACITY = 5
TREASURE_CAPACITY = 10
FORGE_CAPACITY = 10
DISCARD_CAPACITY = 10
def roll(min, max):
"""
Returns a random number between `min` and `max`.
"""
return random.randint(min, max)
def get_random_card_id_in_value_range(min, max, offset):
"""
Randomly picks a card id between `min` and `max`, shifted by the given offset.
The offset determines the type of card.
"""
card_id = roll(
min + offset,
max + offset)
return card_id
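# With the offsets used below, weapon values 2-10 map to card detail ids
# 3-11, potion values 2-10 (offset 10) to ids 12-20, and monster values
# 2-14 (offset 19) to ids 21-33.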
def get_random_weapon_id_in_value_range(min, max):
if min < 2 or max > 10:
return None
return get_random_card_id_in_value_range(min, max, 1)
def get_random_potion_id_in_value_range(min, max):
if min < 2 or max > 10:
return None
return get_random_card_id_in_value_range(min, max, 10)
def get_random_monster_id_in_value_range(min, max):
if min < 2 or max > 14:
return None
return get_random_card_id_in_value_range(min, max, 19)
def start(player):
"""
Attempts creating a new game session for the given player.
"""
if not player:
return None
# Initialize all the stacks.
room_stack = Stack()
room_stack.save()
you_stack = Stack()
you_stack.save()
equipment_stack = Stack()
equipment_stack.save()
forge_stack = Stack()
forge_stack.save()
treasure_stack = Stack()
treasure_stack.save()
discard_stack = Stack()
discard_stack.save()
# Begin a new session.
session = Session(
health=HEALTH_CAPACITY,
# Important to note that a session has to be tied to a player. Same goes for
# cards and stacks; they must, ultimately, be tied to a session. Otherwise
# it would be possible to move cards between sessions.
belongs_to_player=player,
room_stack=room_stack,
you_stack=you_stack,
equipment_stack=equipment_stack,
forge_stack=forge_stack,
treasure_stack=treasure_stack,
discard_stack=discard_stack
)
session.save()
# Draw the first 5 cards.
initial_room_cards = draw(session, ROOM_CAPACITY)
# Put the initial cards in place.
room_stack.push_many(initial_room_cards)
# If everything went as expected, activate the session by hooking it up to the player.
player.active_session = session
player.save()
return session
def draw_single(session, properties=None):
"""
Attempts drawing a single card.
Can optionally be given specific properties, determined randomly otherwise.
"""
if not session:
return None
card_should_be_special = False
if properties is None:
card_should_be_beneficial = roll(0, 100) >= 60 # ~40% chance of not being a monster card
card_should_be_special = roll(0, 100) >= 95 # 5% chance of being special
details_id = None
if card_should_be_beneficial:
luck = roll(0, 100)
weapon_range = range(0, 45)
weapon_part_range = range(45, 75)
potion_range = range(75, 90)
treasure_range = range(90, 100)
if luck in weapon_part_range:
# Weapon Part
details_id = 1
# Mechanic not implemented yet.
card_should_be_special = False
elif luck in treasure_range:
# Treasure
details_id = 2
# Mechanic not implemented yet.
card_should_be_special = False
elif luck in weapon_range:
# Weapon (2-10)
details_id = get_random_weapon_id_in_value_range(2, 10)
elif luck in potion_range:
# Potion (2-10)
details_id = get_random_potion_id_in_value_range(2, 10)
# Mechanic not implemented yet.
card_should_be_special = False
else:
# Monster (2-14)
details_id = get_random_monster_id_in_value_range(2, 14)
# Mechanic not implemented yet.
card_should_be_special = False
if details_id is None:
return None
try:
properties = CardDetail.objects.get(pk=details_id)
except CardDetail.DoesNotExist:
return None
try:
card = Card(
belongs_to_session=session,
details=properties,
is_special=card_should_be_special
)
card.save()
except:
return None
try:
session.belongs_to_player.statistics.cards_drawn += 1
session.belongs_to_player.statistics.save()
except:
pass
return card
def draw(session, amount):
"""
Attempts drawing a specific amount of cards.
"""
if not session:
return None
if amount <= 0:
return None
cards = []
for i in range(0, amount):
card = draw_single(session)
if card is not None:
cards.append(card)
return cards
def can_activate_stack(session, stack):
"""
Determines whether a stack can be activated in its current state.
"""
if not session or session.is_lost() or not stack:
return False
# Assuming an empty stack can never be activated.
if stack.is_empty():
return False
if stack == session.room_stack:
# The current room can never be activated.
return False
if stack == session.discard_stack:
# The discarded stack can never be activated.
return False
if stack == session.equipment_stack or stack == session.you_stack:
# No special rules for weapons/monsters.
pass
if stack == session.forge_stack:
all_forgeable_cards = stack.all_cards()
if all_forgeable_cards:
amount_of_forgeable_cards = len(all_forgeable_cards)
if amount_of_forgeable_cards < 2:
# The forge stack can only be activated when at least 2 scrap cards are placed here.
return False
# todo: should forged cards always be special? (this means you can always override the current stack! potential game changer)
if not session.equipment_stack.is_empty():
return False
return True
def activate_stack(session, stack):
"""
Attempts activating/clearing a stack.
"""
if not session or session.is_lost() or not stack:
return False
if not can_activate_stack(session, stack):
return False
if stack == session.equipment_stack:
discard_many(session, session.equipment_stack.all_cards())
monster_cards = session.you_stack.all_cards()
monster_cards_discarded = discard_many(session, monster_cards)
score = (monster_cards_discarded * monster_cards_discarded)
if session.score_multiplier > 0:
score_percentage_multiplier = float(session.score_multiplier) / TREASURE_CAPACITY
score_bonus = score * (1 + score_percentage_multiplier)
score += score_bonus
session.score += score
session.score_multiplier = 0
session.save()
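# Worked example of the scoring above: discarding 3 monster cards yields
# a base score of 3 * 3 = 9; with a stored score multiplier of 5 the
# bonus is 9 * (1 + 5 / TREASURE_CAPACITY) = 13.5, for 22.5 points total.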
if stack == session.treasure_stack:
treasure_cards = session.treasure_stack.all_cards()
treasure_cards_discarded = discard_many(session, treasure_cards)
session.score_multiplier = treasure_cards_discarded
session.save()
if stack == session.forge_stack:
# Draw a new weapon card that is valued depending on how many cards were spent.
# Attempt discarding all cards that were spent creating a weapon.
value = discard_many(session, session.forge_stack.all_cards())
if value <= 0:
return False
details_id = get_random_weapon_id_in_value_range(value, value)
if details_id is None:
return False
try:
properties = CardDetail.objects.get(pk=details_id)
except CardDetail.DoesNotExist:
return False
# Draw the actual card, given the specific properties determined previously.
weapon_card = draw_single(session, properties)
# Attempt placing the new weapon on the equipment stack. Keep in mind that it is assumed
# that the equipment stack is empty when reaching this point.
did_equip_weapon_card = session.equipment_stack.push(weapon_card)
if not did_equip_weapon_card:
logger.error(' * could not place the forged weapon on the equipment stack!')
return False
return True
def can_activate_card(session, card):
"""
Determines whether a card has properties that allow it to be activated.
"""
if not session or session.is_lost() or not card:
return False
if card.details.kind is CARD_KIND_POTION:
if card.stack != session.you_stack:
# Can only be activated when placed on the You stack.
return False
if card.details.kind is CARD_KIND_MONSTER:
if card.stack != session.you_stack:
# Can only be activated when placed on the You stack.
return False
return True
def activate_card(session, card):
"""
Attempts activating a card.
This usually occurs when a card has been successfully moved from the current room.
"""
if not session or session.is_lost() or not card:
return False
if not can_activate_card(session, card):
return False
if card.details.kind is CARD_KIND_POTION:
restored_health = card.details.value
current_health = session.health
current_health += restored_health
if current_health > HEALTH_CAPACITY:
current_health = HEALTH_CAPACITY
try:
session.health = current_health
session.save()
except:
return False
discard(session, card)
if card.details.kind is CARD_KIND_MONSTER:
most_recently_played_weapon_card = session.equipment_stack.top()
if most_recently_played_weapon_card and most_recently_played_weapon_card.is_special:
try:
# Disable special status as soon as a monster has been placed.
most_recently_played_weapon_card.is_special = False
most_recently_played_weapon_card.save()
except:
return False
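# Damage arithmetic below: the monster's value is offset by the equipped
# weapon's value, e.g. a value-8 monster against a value-5 weapon deals
# 8 - 5 = 3 points of damage; with no weapon equipped the full 8 points
# are subtracted from the player's health.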
damage = card.details.value
if damage:
if most_recently_played_weapon_card:
damage -= most_recently_played_weapon_card.details.value
if damage > 0:
try:
new_health = session.health - damage
if new_health <= 0:
new_health = 0
session.health = new_health
session.save()
except:
return False
if not most_recently_played_weapon_card:
# Monsters only stack if player has a weapon equipped
session.score += 1
session.save()
discard(session, card)
return True
def can_move(session, card, to_stack):
"""
Determines whether a card can be moved to a given stack.
"""
if (not session or session.is_lost()
or not card
or not to_stack):
return False
if to_stack == session.room_stack:
# you can't move cards to the room...
logger.error(' * card can not be moved to the room!')
return False
if to_stack == session.treasure_stack:
if card.details.kind is not CARD_KIND_TREASURE:
# Not a treasure card, bail out...
logger.error(' * only treasure cards can be moved here!')
return False
if len(session.treasure_stack.all_cards()) >= TREASURE_CAPACITY:
# Treasure stack already holds maximum amount of treasure
logger.error(' * max treasure reached!')
return False
if to_stack == session.forge_stack:
if card.details.kind is not CARD_KIND_SCRAP:
# Not a scrap card, bail out...
logger.error(' * only scrap cards can be moved here!')
return False
if len(session.forge_stack.all_cards()) >= FORGE_CAPACITY:
# Forge stack already holds maximum amount of scraps
logger.error(' * max scraps reached!')
return False
if to_stack == session.equipment_stack:
if card.details.kind is not CARD_KIND_WEAPON:
# Not a weapon card, bail out...
logger.error(' * only weapon cards can be moved here!')
return False
most_recently_played_weapon_card = session.equipment_stack.top()
if most_recently_played_weapon_card is not None:
if not card.is_special:
# Only special cards can be placed on top of the previous weapon as a score multiplier.
logger.error(' * only special weapon cards can be placed on top of an equipped weapon!')
return False
"""
To set up the OpenShift driver you need
* a working OpenShift instance
* a user in the OpenShift instance (a separate machine to machine account is
recommended)
1. Start by adding the url, username, password and subdomain in the creds
file. The names are "OSO_XXX_URL", where XXX is the name of your installation
(there can be multiple installations)
2. Restart Pebbles
3. Check out https://github.com/cscfi/notebook-images/
4. Log in as the M2M user using the *oc* command line utility
5. run build_openshift.sh to build and publish images to the OpenShift Docker registry
6. Enable OpenShiftDriver in the Admin UI
"""
import base64
import json
import time
import uuid
from pprint import pprint
import requests
# noinspection PyUnresolvedReferences
from six.moves.urllib.parse import urlparse, parse_qs
from pebbles.client import PBClient
from pebbles.drivers.provisioning import base_driver
from pebbles.utils import parse_maximum_lifetime
# maximum time to wait for pod creation before failing
MAX_POD_SPAWN_WAIT_TIME_SEC = 900
# maximum time to wait for pod (down) scaling
MAX_POD_SCALE_WAIT_TIME_SEC = 120
# refresh the token if it is this close to expiration
TOKEN_REFRESH_DELTA = 600
class OpenShiftClient(object):
"""
An abstraction of accessing an OpenShift cluster
"""
def __init__(self, base_url, subdomain, user, password):
"""
Constructor
:param base_url: url to access the api, like https://oso.example.org:8443/
:param subdomain: the subdomain for creating the routes, like osoapps.example.org
:param user:
:param password:
"""
if base_url[-1] == '/':
base_url = base_url[:-1]
self.base_url = base_url
self.subdomain = subdomain
self.oapi_base_url = base_url + '/oapi/v1'
self.kube_base_url = base_url + '/api/v1'
self.template_base_url = base_url + '/apis/template.openshift.io/v1'
self.user = user
self.password = password
# token_data caches the token to access the API. See _request_token() for details.
self.token_data = None
self._session = requests.session()
@staticmethod
def make_base_kube_object(kind, name=None):
return dict(
kind=kind,
apiVersion="v1",
metadata=dict(
name=name
)
)
@staticmethod
def print_response(resp):
if resp.ok:
print('success: %s' % resp.status_code)
pprint(resp.json())
else:
print('error in response: %s %s %s' % (resp.status_code, resp.reason, resp.text))
def _request_token(self, current_ts=None):
"""
Requests an access token for the cluster
:param current_ts: current timestamp
:return: dict containing access_token, lifetime and expiry time
"""
url = self.base_url + '/oauth/authorize'
auth_encoded = base64.b64encode(bytes('%s:%s' % (self.user, self.password)))
headers = {
'Authorization': 'Basic %s' % str(auth_encoded),
'X-Csrf-Token': '1'
}
params = {
'response_type': 'token',
'client_id': 'openshift-challenging-client'
}
resp = requests.get(url, headers=headers, verify=False, params=params, allow_redirects=False)
location = resp.headers.get('location')
if not current_ts:
current_ts = int(time.time())
parsed_data = urlparse(location)
parsed_query = parse_qs(parsed_data.fragment)
return {
'access_token': parsed_query['access_token'][0],
'lifetime': int(parsed_query['expires_in'][0]),
'expires_at': int(parsed_query['expires_in'][0]) + current_ts,
}
def _get_token(self, current_ts=None):
"""
Caching version of _request_token
"""
if not self.token_data:
self.token_data = self._request_token(current_ts)
else:
if not current_ts:
current_ts = int(time.time())
if self.token_data['expires_at'] - TOKEN_REFRESH_DELTA < current_ts:
self.token_data = self._request_token(current_ts)
return self.token_data['access_token']
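# For example, with a token lifetime of 3600 seconds and
# TOKEN_REFRESH_DELTA = 600, a cached token is reused until fewer than
# 10 minutes of validity remain, after which a fresh one is requested.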
def _construct_object_url(self, api_type, namespace=None, object_kind=None, object_id=None, subop=None):
"""
Create a url string for given object
:param api_type: which API family to use ('oapi', 'kubeapi' or 'template_oapi')
:param namespace: namespace for the object
:param object_kind: type of the object
:param object_id: id of the object
:return: url string, like 'https://oso.example.org:8443/api/v1/my-project/pods/18hfgy1'
"""
if api_type == 'kubeapi':
url_components = [self.kube_base_url]
elif api_type == 'template_oapi':
url_components = [self.template_base_url]
else:
url_components = [self.oapi_base_url]
if namespace:
url_components.append('namespaces')
url_components.append(namespace)
if object_kind:
url_components.append(object_kind)
if object_id:
url_components.append(object_id)
if subop:
url_components.append(subop)
url = '/'.join(url_components)
return url
def make_request(self, method=None, api_type='oapi', verbose=False, namespace=None, object_kind=None, object_id=None,
subop=None, params=None, data=None, raise_on_failure=True):
"""
Makes a request to OpenShift API
:param method: GET, PUT, POST
:param api_type: which API family to use ('oapi', 'kubeapi' or 'template_oapi')
:param verbose: debugging on
:param namespace: namespace for the object
:param object_kind: type of the object
:param object_id: id of the object
:param subop: if it's a suboperation eg. getting logs of an object
:param params: request parameters
:param data: request data
:param raise_on_failure: should we raise a RuntimeError on failure
:return: response object from requests session
"""
url = self._construct_object_url(api_type, namespace, object_kind, object_id, subop)
headers = {'Authorization': 'Bearer %s' % self._get_token()}
if isinstance(data, dict):
data = json.dumps(data)
if data:
if not method or method == 'POST':
resp = self._session.post(url, headers=headers, verify=False, params=params, data=data)
elif method == 'PUT':
resp = self._session.put(url, headers=headers, verify=False, params=params, data=data)
else:
raise RuntimeError('Do not know what to do with data and method %s' % method)
else:
if method and method != 'GET':
raise RuntimeError('Do not know what to do with no data and method %s' % method)
resp = self._session.get(url, headers=headers, verify=False, params=params)
if verbose:
self.print_response(resp)
if raise_on_failure and not resp.ok:
raise RuntimeError(resp.text)
return resp
def make_delete_request(self, api_type='oapi', verbose=False, namespace=None, object_kind=None, object_id=None,
params=None, raise_on_failure=True):
"""
Makes a delete request to OpenShift API
:param api_type: which API family to use ('oapi', 'kubeapi' or 'template_oapi')
:param verbose: debugging on
:param namespace: namespace for the object
:param object_kind: type of the object
:param object_id: id of the object
:param raise_on_failure: should we raise a RuntimeError on failure
:return: response object from requests session
"""
url = self._construct_object_url(api_type, namespace, object_kind, object_id)
headers = {'Authorization': 'Bearer %s' % self._get_token()}
resp = self._session.delete(url, headers=headers, verify=False, params=params)
if verbose:
self.print_response(resp)
if raise_on_failure and not resp.ok:
raise RuntimeError(resp.text)
return resp
def search_by_label(self, api_type, namespace=None, object_kind=None, params=None):
"""
Performs a search by label(s)
:param api_type: which API family to use ('oapi', 'kubeapi' or 'template_oapi')
:param namespace:
:param object_kind:
:param params: a dict containing search criteria, like {'labelSelector': 'app=my-app'}
:return: search results as json
"""
res = self.make_request(
api_type=api_type,
namespace=namespace,
object_kind=object_kind,
params=params
)
res_json = res.json()
return res_json.get('items', [])
class OpenShiftDriverAccessProxy(object):
"""
Abstraction layer for isolating driver from real world to enable mocking in unit tests
"""
def __init__(self, m2m_creds):
self._m2m_creds = m2m_creds
def get_openshift_client(self, cluster_id):
key_base = 'OSD_%s_' % cluster_id
return OpenShiftClient(
base_url=self._m2m_creds.get(key_base + 'BASE_URL'),
subdomain=self._m2m_creds.get(key_base + 'SUBDOMAIN'),
user=self._m2m_creds.get(key_base + 'USER'),
password=self._m2m_creds.get(key_base + 'PASSWORD'),
)
@staticmethod
def get_pb_client(token, api_base_url, ssl_verify):
return PBClient(token, api_base_url, ssl_verify)
class OpenShiftDriver(base_driver.ProvisioningDriverBase):
""" OpenShift Driver allows provisioning instances in an existing OpenShift cluster.
It creates a project per user, identified by user eppn, and optionally a persistent
volume claim (PVC) for user data.
The driver needs credentials for the cluster. The credentials are placed in the same
m2m creds file that OpenStack and Docker driver use. The keys are as follows:
"OSD_[cluster_id]_BASE_URL": "https://oso-cluster-api.example.org:8443",
"OSD_[cluster_id]_SUBDOMAIN": "oso-cluster.example.org",
"OSD_[cluster_id]_USER": "pebbles-m2m-user",
"OSD_[cluster_id]_PASSWORD": "<PASSWORD>"
Replace [cluster_id] with a unique string to a cluster. When creating a blueprint template,
refer to the cluster id in the configuration, key 'openshift_cluster_id'. You can have multiple
credentials configured in the creds file.
"""
def get_configuration(self):
from pebbles.drivers.provisioning.openshift_driver_config import CONFIG
config = CONFIG.copy()
return config
def get_running_instance_logs(self, token, instance_id):
""" Get the logs of the openshift based instance which is in running state """
self.logger.debug("getting container logs for instance id %s" % instance_id)
ap = self._get_access_proxy()
pbclient = ap.get_pb_client(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
running_log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='running')
instance = pbclient.get_instance_description(instance_id)
# create openshift client by getting the cluster id from the blueprint config
blueprint = pbclient.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
oc = ap.get_openshift_client(
cluster_id=blueprint_config['openshift_cluster_id'],
)
instance_name = instance['name']
project = self._get_project_name(instance)
log_res = oc.make_request(
method='GET',
namespace=project,
object_kind='deploymentconfigs',
object_id=instance_name,
subop='log',
)
running_log_uploader.info(log_res.text)
def _get_access_proxy(self):
if not getattr(self, '_ap', None):
m2m_creds = self.get_m2m_credentials()
self._ap = OpenShiftDriverAccessProxy(m2m_creds)
return self._ap
def do_update_connectivity(self, token, instance_id):
self.logger.warning('do_update_connectivity not implemented')
def do_provision(self, token, instance_id):
self.logger.debug('do_provision %s' % instance_id)
log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
log_uploader.info('Provisioning OpenShift based instance (%s)\n' % instance_id)
return self._do_provision(token, instance_id, int(time.time()))
def _do_provision(self, token, instance_id, cur_ts):
"""
Provisions a new instance on OpenShift.
:param token: token to access the API with
:param instance_id: instance that should be provisioned
:param cur_ts: current time
"""
ap = self._get_access_proxy()
pbclient = ap.get_pb_client(token, self.config['INTERNAL_API_BASE_URL'], ssl_verify=False)
log_uploader = self.create_prov_log_uploader(token, instance_id, log_type='provisioning')
instance = pbclient.get_instance_description(instance_id)
# fetch config
blueprint = pbclient.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
instance_token = None
if 'auto_authentication' in blueprint_config and blueprint_config['auto_authentication']:
instance_seconds = parse_maximum_lifetime(blueprint_config['maximum_lifetime'])
instance_token = pbclient.create_instance_token(instance_id, instance_seconds)
# get/generate a project name
project_name = self._get_project_name(instance)
# create an openshift client based for selected cluster
oc = ap.get_openshift_client(
cluster_id=blueprint_config['openshift_cluster_id'],
)
# create a dict out of space separated list of VAR=VAL entries
env_var_array = blueprint_config.get('environment_vars', '').split()
env_vars = {k: v for k, v in [x.split('=') for x in env_var_array]}
env_vars['INSTANCE_ID'] = instance_id
# merge the autodownload vars into environment
for var_suffix in ('url', 'filename'):
var = 'autodownload_{}'.format(var_suffix)
if blueprint_config.get(var, None):
env_vars[var.upper()] = blueprint_config[var]
# create a project and PVC if necessary and spawn a pod (through a deployment config)
(556, True),
(557, False),
(558, False),
(559, False),
(560, True),
(561, False),
(562, False),
(563, False),
(564, True),
(565, False),
(566, False),
(567, False),
(568, True),
(569, False),
(570, False),
(571, False),
(572, True),
(573, False),
(574, False),
(575, False),
(576, True),
(577, False),
(578, False),
(579, False),
(580, True),
(581, False),
(582, False),
(583, False),
(584, True),
(585, False),
(586, False),
(587, False),
(588, True),
(589, False),
(590, False),
(591, False),
(592, True),
(593, False),
(594, False),
(595, False),
(596, True),
(597, False),
(598, False),
(599, False),
(600, False),
(601, False),
(602, False),
(603, False),
(604, True),
(605, False),
(606, False),
(607, False),
(608, True),
(609, False),
(610, False),
(611, False),
(612, True),
(613, False),
(614, False),
(615, False),
(616, True),
(617, False),
(618, False),
(619, False),
(620, True),
(621, False),
(622, False),
(623, False),
(624, True),
(625, False),
(626, False),
(627, False),
(628, True),
(629, False),
(630, False),
(631, False),
(632, True),
(633, False),
(634, False),
(635, False),
(636, True),
(637, False),
(638, False),
(639, False),
(640, True),
(641, False),
(642, False),
(643, False),
(644, True),
(645, False),
(646, False),
(647, False),
(648, True),
(649, False),
(650, False),
(651, False),
(652, True),
(653, False),
(654, False),
(655, False),
(656, True),
(657, False),
(658, False),
(659, False),
(660, True),
(661, False),
(662, False),
(663, False),
(664, True),
(665, False),
(666, False),
(667, False),
(668, True),
(669, False),
(670, False),
(671, False),
(672, True),
(673, False),
(674, False),
(675, False),
(676, True),
(677, False),
(678, False),
(679, False),
(680, True),
(681, False),
(682, False),
(683, False),
(684, True),
(685, False),
(686, False),
(687, False),
(688, True),
(689, False),
(690, False),
(691, False),
(692, True),
(693, False),
(694, False),
(695, False),
(696, True),
(697, False),
(698, False),
(699, False),
(700, False),
(701, False),
(702, False),
(703, False),
(704, True),
(705, False),
(706, False),
(707, False),
(708, True),
(709, False),
(710, False),
(711, False),
(712, True),
(713, False),
(714, False),
(715, False),
(716, True),
(717, False),
(718, False),
(719, False),
(720, True),
(721, False),
(722, False),
(723, False),
(724, True),
(725, False),
(726, False),
(727, False),
(728, True),
(729, False),
(730, False),
(731, False),
(732, True),
(733, False),
(734, False),
(735, False),
(736, True),
(737, False),
(738, False),
(739, False),
(740, True),
(741, False),
(742, False),
(743, False),
(744, True),
(745, False),
(746, False),
(747, False),
(748, True),
(749, False),
(750, False),
(751, False),
(752, True),
(753, False),
(754, False),
(755, False),
(756, True),
(757, False),
(758, False),
(759, False),
(760, True),
(761, False),
(762, False),
(763, False),
(764, True),
(765, False),
(766, False),
(767, False),
(768, True),
(769, False),
(770, False),
(771, False),
(772, True),
(773, False),
(774, False),
(775, False),
(776, True),
(777, False),
(778, False),
(779, False),
(780, True),
(781, False),
(782, False),
(783, False),
(784, True),
(785, False),
(786, False),
(787, False),
(788, True),
(789, False),
(790, False),
(791, False),
(792, True),
(793, False),
(794, False),
(795, False),
(796, True),
(797, False),
(798, False),
(799, False),
(800, True),
(801, False),
(802, False),
(803, False),
(804, True),
(805, False),
(806, False),
(807, False),
(808, True),
(809, False),
(810, False),
(811, False),
(812, True),
(813, False),
(814, False),
(815, False),
(816, True),
(817, False),
(818, False),
(819, False),
(820, True),
(821, False),
(822, False),
(823, False),
(824, True),
(825, False),
(826, False),
(827, False),
(828, True),
(829, False),
(830, False),
(831, False),
(832, True),
(833, False),
(834, False),
(835, False),
(836, True),
(837, False),
(838, False),
(839, False),
(840, True),
(841, False),
(842, False),
(843, False),
(844, True),
(845, False),
(846, False),
(847, False),
(848, True),
(849, False),
(850, False),
(851, False),
(852, True),
(853, False),
(854, False),
(855, False),
(856, True),
(857, False),
(858, False),
(859, False),
(860, True),
(861, False),
(862, False),
(863, False),
(864, True),
(865, False),
(866, False),
(867, False),
(868, True),
(869, False),
(870, False),
(871, False),
(872, True),
(873, False),
(874, False),
(875, False),
(876, True),
(877, False),
(878, False),
(879, False),
(880, True),
(881, False),
(882, False),
(883, False),
(884, True),
(885, False),
(886, False),
(887, False),
(888, True),
(889, False),
(890, False),
(891, False),
(892, True),
(893, False),
(894, False),
(895, False),
(896, True),
(897, False),
(898, False),
(899, False),
(900, False),
(901, False),
(902, False),
(903, False),
(904, True),
(905, False),
(906, False),
(907, False),
(908, True),
(909, False),
(910, False),
(911, False),
(912, True),
(913, False),
(914, False),
(915, False),
(916, True),
(917, False),
(918, False),
(919, False),
(920, True),
(921, False),
(922, False),
(923, False),
(924, True),
(925, False),
(926, False),
(927, False),
(928, True),
(929, False),
(930, False),
(931, False),
(932, True),
(933, False),
(934, False),
(935, False),
(936, True),
(937, False),
(938, False),
(939, False),
(940, True),
(941, False),
(942, False),
(943, False),
(944, True),
(945, False),
(946, False),
(947, False),
(948, True),
(949, False),
(950, False),
(951, False),
(952, True),
(953, False),
(954, False),
(955, False),
(956, True),
(957, False),
(958, False),
(959, False),
(960, True),
(961, False),
(962, False),
(963, False),
(964, True),
(965, False),
(966, False),
(967, False),
(968, True),
(969, False),
(970, False),
(971, False),
(972, True),
(973, False),
(974, False),
(975, False),
(976, True),
(977, False),
(978, False),
(979, False),
(980, True),
(981, False),
(982, False),
(983, False),
(984, True),
(985, False),
(986, False),
(987, False),
(988, True),
(989, False),
(990, False),
(991, False),
(992, True),
(993, False),
(994, False),
(995, False),
(996, True),
(997, False),
(998, False),
(999, False),
(1000, False),
(1001, False),
(1002, False),
(1003, False),
(1004, True),
(1005, False),
(1006, False),
(1007, False),
(1008, True),
(1009, False),
(1010, False),
(1011, False),
(1012, True),
(1013, False),
(1014, False),
(1015, False),
(1016, True),
(1017, False),
(1018, False),
(1019, False),
(1020, True),
(1021, False),
(1022, False),
(1023, False),
(1024, True),
(1025, False),
(1026, False),
(1027, False),
(1028, True),
(1029, False),
(1030, False),
(1031, False),
(1032, True),
(1033, False),
(1034, False),
(1035, False),
(1036, True),
(1037, False),
(1038, False),
(1039, False),
(1040, True),
(1041, False),
(1042, False),
(1043, False),
(1044, True),
(1045, False),
(1046, False),
(1047, False),
(1048, True),
(1049, False),
(1050, False),
(1051, False),
(1052, True),
(1053, False),
(1054, False),
(1055, False),
(1056, True),
(1057, False),
(1058, False),
(1059, False),
(1060, True),
(1061, False),
(1062, False),
(1063, False),
(1064, True),
(1065, False),
(1066, False),
(1067, False),
(1068, True),
(1069, False),
(1070, False),
(1071, False),
(1072, True),
(1073, False),
(1074, False),
(1075, False),
(1076, True),
(1077, False),
(1078, False),
(1079, False),
(1080, True),
(1081, False),
(1082, False),
(1083, False),
(1084, True),
(1085, False),
(1086, False),
(1087, False),
(1088, True),
(1089, False),
(1090, False),
(1091, False),
(1092, True),
(1093, False),
(1094, False),
(1095, False),
(1096, True),
(1097, False),
(1098, False),
(1099, False),
(1100, False),
(1101, False),
(1102, False),
(1103, False),
(1104, True),
(1105, False),
(1106, False),
(1107, False),
(1108, True),
(1109, False),
(1110, False),
(1111, False),
(1112, True),
(1113, False),
(1114, False),
(1115, False),
(1116, True),
(1117, False),
(1118, False),
(1119, False),
(1120, True),
(1121, False),
(1122, False),
(1123, False),
(1124, True),
(1125, False),
(1126, False),
(1127, False),
(1128, True),
(1129, False),
(1130, False),
(1131, False),
(1132, True),
(1133, False),
(1134, False),
(1135, False),
(1136, True),
(1137, False),
(1138, False),
(1139, False),
(1140, True),
(1141, False),
(1142, False),
(1143, False),
(1144, True),
(1145, False),
(1146, False),
(1147, False),
(1148, True),
(1149, False),
(1150, False),
(1151, False),
(1152, True),
(1153, False),
(1154, False),
(1155, False),
(1156, True),
(1157, False),
(1158, False),
(1159, False),
(1160, True),
(1161, False),
(1162, False),
(1163, False),
(1164, True),
(1165, False),
(1166, False),
(1167, False),
(1168, True),
(1169, False),
(1170, False),
(1171, False),
(1172, True),
(1173, False),
(1174, False),
(1175, False),
(1176, True),
(1177, False),
(1178, False),
(1179, False),
(1180, True),
(1181, False),
(1182, False),
(1183, False),
(1184, True),
(1185, False),
(1186, False),
(1187, False),
(1188, True),
(1189, False),
(1190, False),
(1191, False),
(1192, True),
(1193, False),
(1194, False),
(1195, False),
"""Learn ideal points with the text-based ideal point model (TBIP).
Let y_{dv} denote the counts of word v in document d. Let x_d refer to the
ideal point of the author of document d. Then we model:
theta, beta ~ Gamma(alpha, alpha)
x, eta ~ N(0, 1)
y_{dv} ~ Pois(sum_k theta_dk beta_kv exp(x_d * eta_kv)).
We perform variational inference to provide estimates for the posterior
distribution of each latent variable. We take reparameterization gradients,
using a lognormal variational family for the positive variables (theta, beta)
and a normal variational family for the real variables (x, eta).
The directory `data/{data_name}/clean/` should have the following four files:
1. `counts.npz`: a [num_documents, num_words] sparse matrix containing the
word counts for each document.
2. `author_indices.npy`: a [num_documents] vector where each entry is an
integer in the set {0, 1, ..., num_authors - 1}, indicating the author of
the corresponding document in `counts.npz`.
3. `vocabulary.txt`: a [num_words]-length file where each line is a string
denoting the corresponding word in the vocabulary.
4. `author_map.txt`: a [num_authors]-length file where each line is a string
denoting the name of an author in the corpus.
We provide more details in our paper [1].
#### References
[1]: <NAME>, <NAME>, <NAME>. Text-Based Ideal Points. In
_Conference of the Association for Computational Linguistics_, 2020.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import time
from absl import flags
import numpy as np
import scipy.sparse as sparse
import tensorflow as tf
import tensorflow_probability as tfp
flags.DEFINE_float("learning_rate",
default=0.01,
help="Adam learning rate.")
flags.DEFINE_integer("max_steps",
default=1000000,
help="Number of training steps to run.")
flags.DEFINE_integer("num_topics",
default=50,
help="Number of topics.")
flags.DEFINE_integer("batch_size",
default=1024,
help="Batch size.")
flags.DEFINE_integer("num_samples",
default=1,
help="Number of samples to use for ELBO approximation.")
flags.DEFINE_enum("counts_transformation",
default="nothing",
enum_values=["nothing", "binary", "sqrt", "log"],
help="Transformation used on counts data.")
flags.DEFINE_boolean("pre_initialize_parameters",
default=True,
help="Whether to use pre-initialized document and topic "
"intensities (with Poisson factorization).")
flags.DEFINE_string("data",
default="senate-speeches-114",
help="Data source being used.")
flags.DEFINE_integer("senate_session",
default=113,
help="Senate session (used only when data is "
"'senate-speech-comparisons'.")
flags.DEFINE_integer("print_steps",
default=500,
help="Number of steps to print and save results.")
flags.DEFINE_integer("seed",
default=123,
help="Random seed to be used.")
FLAGS = flags.FLAGS
def build_input_pipeline(data_dir,
batch_size,
random_state,
counts_transformation="nothing"):
"""Load data and build iterator for minibatches.
Args:
data_dir: The directory where the data is located. There must be four
files inside the rep: `counts.npz`, `author_indices.npy`,
`author_map.txt`, and `vocabulary.txt`.
batch_size: The batch size to use for training.
random_state: A NumPy `RandomState` object, used to shuffle the data.
counts_transformation: A string indicating how to transform the counts.
One of "nothing", "binary", "log", or "sqrt".
"""
counts = sparse.load_npz(os.path.join(data_dir, "counts.npz"))
num_documents, num_words = counts.shape
author_indices = np.load(
os.path.join(data_dir, "author_indices.npy")).astype(np.int32)
num_authors = np.max(author_indices + 1)
author_map = np.loadtxt(os.path.join(data_dir, "author_map.txt"),
dtype=str,
delimiter="\n",
encoding='latin-1')
# Shuffle data.
documents = random_state.permutation(num_documents)
shuffled_author_indices = author_indices[documents]
shuffled_counts = counts[documents]
# Apply counts transformation.
if counts_transformation == "nothing":
count_values = shuffled_counts.data
elif counts_transformation == "binary":
count_values = np.int32(shuffled_counts.data > 0)
elif counts_transformation == "log":
count_values = np.round(np.log(1 + shuffled_counts.data))
elif counts_transformation == "sqrt":
count_values = np.round(np.sqrt(shuffled_counts.data))
else:
raise ValueError("Unrecognized counts transformation.")
# Store counts as sparse tensor so it occupies less memory.
shuffled_counts = tf.SparseTensor(
indices=np.array(shuffled_counts.nonzero()).T,
values=count_values,
dense_shape=shuffled_counts.shape)
dataset = tf.data.Dataset.from_tensor_slices(
(documents, shuffled_counts, shuffled_author_indices))
batches = dataset.repeat().batch(batch_size).prefetch(batch_size)
iterator = batches.make_one_shot_iterator()
vocabulary = np.loadtxt(os.path.join(data_dir, "vocabulary.txt"),
dtype=str,
delimiter="\n",
comments="<!-")
total_counts_per_author = np.bincount(
author_indices,
weights=np.array(np.sum(counts, axis=1)).flatten())
counts_per_document_per_author = (
total_counts_per_author / np.bincount(author_indices))
# Author weights capture how much longer each author's average document
# is than the corpus-average document.
author_weights = (counts_per_document_per_author /
np.mean(np.sum(counts, axis=1))).astype(np.float32)
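# Worked example: an author whose documents average 500 words in a corpus
# whose documents average 250 words receives an author weight of 2.0.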
return (iterator, author_weights, vocabulary, author_map,
num_documents, num_words, num_authors)
def build_lognormal_variational_parameters(initial_document_loc,
initial_objective_topic_loc,
num_documents,
num_words,
num_topics):
"""
Build document and objective topic lognormal variational parameters.
Args:
initial_document_loc: A [num_documents, num_topics] NumPy array containing
the initial document intensity means.
initial_objective_topic_loc: A [num_topics, num_words] NumPy array
containing the initial objective topic means.
num_documents: Number of documents in the data set.
num_words: Number of words in the data set.
num_topics: Number of topics.
Returns:
document_loc: A Variable object with shape [num_documents, num_topics].
document_scale: A positive Variable object with shape [num_documents,
num_topics].
objective_topic_loc: A Variable object with shape [num_topics, num_words].
objective_topic_scale: A positive Variable object with shape [num_topics,
num_words].
"""
document_loc = tf.get_variable(
"document_loc",
initializer=tf.constant(np.log(initial_document_loc)))
objective_topic_loc = tf.get_variable(
"objective_topic_loc",
initializer=tf.constant(np.log(initial_objective_topic_loc)))
document_scale_logit = tf.get_variable(
"document_scale_logit",
shape=[num_documents, num_topics],
initializer=tf.initializers.random_normal(mean=0, stddev=1.),
dtype=tf.float32)
objective_topic_scale_logit = tf.get_variable(
"objective_topic_scale_logit",
shape=[num_topics, num_words],
initializer=tf.initializers.random_normal(mean=0, stddev=1.),
dtype=tf.float32)
document_scale = tf.nn.softplus(document_scale_logit)
objective_topic_scale = tf.nn.softplus(objective_topic_scale_logit)
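# softplus(x) = log(1 + exp(x)) maps the unconstrained logits to strictly
# positive scales, as required by the lognormal variational family.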
tf.summary.histogram("params/document_loc", document_loc)
tf.summary.histogram("params/objective_topic_loc", objective_topic_loc)
tf.summary.histogram("params/document_scale", document_scale)
tf.summary.histogram("params/objective_topic_scale", objective_topic_scale)
return (document_loc, document_scale,
objective_topic_loc, objective_topic_scale)
def print_topics(neutral_mean, negative_mean, positive_mean, vocabulary):
"""Get neutral and ideological topics to be used for Tensorboard.
Args:
neutral_mean: The mean of the neutral topics, a NumPy matrix with shape
[num_topics, num_words].
negative_mean: The mean of the negative topics, a NumPy matrix with shape
[num_topics, num_words].
positive_mean: The mean of the positive topics, a NumPy matrix with shape
[num_topics, num_words].
vocabulary: A list of the vocabulary with shape [num_words].
Returns:
topic_strings: A list of the negative, neutral, and positive topics.
"""
num_topics, num_words = neutral_mean.shape
words_per_topic = 10
top_neutral_words = np.argsort(-neutral_mean, axis=1)
top_negative_words = np.argsort(-negative_mean, axis=1)
top_positive_words = np.argsort(-positive_mean, axis=1)
topic_strings = []
for topic_idx in range(num_topics):
neutral_start_string = "Neutral {}:".format(topic_idx)
neutral_row = [vocabulary[word] for word in
top_neutral_words[topic_idx, :words_per_topic]]
neutral_row_string = ", ".join(neutral_row)
neutral_string = " ".join([neutral_start_string, neutral_row_string])
positive_start_string = "Positive {}:".format(topic_idx)
positive_row = [vocabulary[word] for word in
top_positive_words[topic_idx, :words_per_topic]]
positive_row_string = ", ".join(positive_row)
positive_string = " ".join([positive_start_string, positive_row_string])
negative_start_string = "Negative {}:".format(topic_idx)
negative_row = [vocabulary[word] for word in
top_negative_words[topic_idx, :words_per_topic]]
negative_row_string = ", ".join(negative_row)
negative_string = " ".join([negative_start_string, negative_row_string])
topic_strings.append(" \n".join(
[negative_string, neutral_string, positive_string]))
return np.array(topic_strings)
def print_ideal_points(ideal_point_loc, author_map):
"""Print ideal point ordering for Tensorboard."""
return ", ".join(author_map[np.argsort(ideal_point_loc)])
def get_log_prior(samples, prior):
"""Return log prior of sampled Gaussians.
Args:
samples: A `Tensor` with shape `[num_samples, :, :]`.
prior: String representing prior distribution.
Returns:
log_prior: A `Tensor` with shape `[num_samples]`, with the log priors
summed across latent dimensions.
"""
if prior == 'normal':
prior_distribution = tfp.distributions.Normal(loc=0., scale=1.)
elif prior == 'gamma':
prior_distribution = tfp.distributions.Gamma(concentration=0.3, rate=0.3)
log_prior = tf.reduce_sum(prior_distribution.log_prob(samples),
axis=[1, 2])
return log_prior
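# For reference, the Gamma(a=0.3, b=0.3) branch above sums the log-density
#   log p(x) = a*log(b) - lgamma(a) + (a - 1)*log(x) - b*x
# over the latent dimensions, matching the theta, beta ~ Gamma(alpha, alpha)
# prior stated in the module docstring.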
def get_elbo(counts,
document_indices,
author_indices,
author_weights,
document_distribution,
objective_topic_distribution,
ideological_topic_distribution,
ideal_point_distribution,
num_documents,
batch_size,
num_samples=1):
"""Approximate variational Lognormal ELBO using reparameterization.
Args:
counts: A matrix with shape `[batch_size, num_words]`.
document_indices: An int-vector with shape `[batch_size]`.
author_indices: An int-vector with shape `[batch_size]`.
author_weights: A vector with shape `[num_authors]`, indicating how
verbose each author is relative to the average document length.
document_distribution: A positive `Distribution` object with parameter
shape `[num_documents, num_topics]`.
objective_topic_distribution: A positive `Distribution` object with
parameter shape `[num_topics, num_words]`.
ideological_topic_distribution: A positive `Distribution` object with
parameter shape `[num_topics, num_words]`.
ideal_point_distribution: A real-valued `Distribution` object with
parameter shape `[num_authors]`.
num_documents: The number of documents in the total data set (used to
calculate log-likelihood scale).
batch_size: Batch size (used to calculate log-likelihood scale).
num_samples: Number of Monte-Carlo samples.
Returns:
elbo: A scalar representing a Monte-Carlo sample of the ELBO. This value is
averaged across samples and summed across batches.
"""
document_samples = document_distribution.sample(num_samples)
objective_topic_samples = objective_topic_distribution.sample(num_samples)
ideological_topic_samples = ideological_topic_distribution.sample(
num_samples)
ideal_point_samples = ideal_point_distribution.sample(num_samples)
_, num_topics, _ = objective_topic_samples.get_shape().as_list()
ideal_point_log_prior = tfp.distributions.Normal(
loc=0.,
scale=1.)
ideal_point_log_prior = tf.reduce_sum(
ideal_point_log_prior.log_prob(ideal_point_samples), axis=[1,2])
document_log_prior = get_log_prior(document_samples, 'gamma')
objective_topic_log_prior = get_log_prior(objective_topic_samples, 'gamma')
ideological_topic_log_prior = get_log_prior(ideological_topic_samples,
'normal')
log_prior = (document_log_prior +
objective_topic_log_prior +
ideological_topic_log_prior +
ideal_point_log_prior)
selected_document_samples = tf.gather(document_samples,
document_indices,
axis=1)
selected_ideal_points = tf.gather(ideal_point_samples,
author_indices,
axis=1)
selected_ideological_topic_samples = tf.exp(
# replace by a column
selected_ideal_points[:, :, :, tf.newaxis] *
ideological_topic_samples[:, tf.newaxis, :, :])
# Normalize by how lengthy the author's opinion is.
selected_author_weights = tf.gather(author_weights, author_indices)
selected_ideological_topic_samples = (
selected_author_weights[tf.newaxis, :, tf.newaxis, tf.newaxis] *
selected_ideological_topic_samples)
document_entropy = -tf.reduce_sum(
document_distribution.log_prob(document_samples),
axis=[1, 2])
objective_topic_entropy = -tf.reduce_sum(
objective_topic_distribution.log_prob(objective_topic_samples),
axis=[1, 2])
ideological_topic_entropy = -tf.reduce_sum(
ideological_topic_distribution.log_prob(ideological_topic_samples),
axis=[1, 2])
ideal_point_entropy = -tf.reduce_sum(
ideal_point_distribution.log_prob(ideal_point_samples),
axis=1)
entropy = (document_entropy +
objective_topic_entropy +
ideological_topic_entropy +
ideal_point_entropy)
rate = tf.reduce_sum(
selected_document_samples[:, :, :, tf.newaxis] *
objective_topic_samples[:, tf.newaxis, :, :] *
selected_ideological_topic_samples[:, :, :, :],
axis=2)
count_distribution = tfp.distributions.Poisson(rate=rate)
# Need to un-sparsify the counts to evaluate log-likelihood.
  count_log_likelihood = count_distribution.log_prob(tf.sparse.to_dense(counts))
  count_log_likelihood = tf.reduce_sum(count_log_likelihood, axis=[1, 2])
  # Adjust for the fact that only a minibatch of the full corpus is used.
  count_log_likelihood = count_log_likelihood * (num_documents / batch_size)
  elbo = log_prior + count_log_likelihood + entropy
  elbo = tf.reduce_mean(elbo)
  return elbo
result_out = economics.json() if importance is None else self.__importance("economics", economics.json(),
importance)
return result_out
def guidance(self, page=None, pagesize=None, date_asof=None, date_from=None, date_to=None,
company_tickers=None, importance=None, date_sort=None, updated_params=None, country=None):
"""Public Method: Benzinga Guidance looks at different attributes like revenue guidance etc.
Arguments:
Optional:
page (int) - page offset
pagesize (int) - limit of results returned
date_asof (str) - "YYYY-MM-DD"
date_from (str) - "YYYY-MM-DD"
date_to (str) - "YYYY-MM-DD"
company_tickers (str)
importance - (int) - not tested yet.
            date_sort - (str) - Guidance date field to sort on
updated_params (int64) - records last updated unix time stamp. Forces the
sort order to be greater or equal to the time stamp indicated.
country (str) - 3 digit country code
Returns:
id, date, time, ticker, exchange, name, period, period_year, prelim, eps_guidance_est,
eps_guidance_max, eps_guidance_min, eps_guidance_prior_max, eps_guidance_prior_min,
            revenue_guidance_est, revenue_guidance_max, revenue_guidance_min,
            revenue_guidance_prior_max, revenue_guidance_prior_min, importance, updated"""
params = {
'token': self.token,
"page": page,
"pagesize": pagesize,
"parameters[date]": date_asof,
"parameters[date_from]": date_from,
"parameters[date_to]": date_to,
"parameters[tickers]": company_tickers,
"parameters[importance]": None,
"parameters[date_sort]": date_sort,
"parameters[updated]": updated_params,
"country": country
}
self.param_initiate.calendar_check(params)
try:
guidance_url = self.__url_call("calendar", "guidance")
guidance = requests_retry_session().get(guidance_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=guidance.url,
status_code=guidance.status_code)
if self.log:
log.info(statement)
self.__check_status(guidance.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
result_out = guidance.json() if importance is None else self.__importance("guidance", guidance.json(),
importance)
return result_out
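    # Hedged usage sketch (assumes a configured client instance; the
    # constructor shown is illustrative, not confirmed by this snippet):
    #   client = Benzinga("<api-key>")
    #   data = client.guidance(company_tickers="AAPL",
    #                          date_from="2021-01-01", date_to="2021-03-31")
    #   # With importance set, results are filtered client-side afterwards:
    #   important = client.guidance(company_tickers="AAPL", importance=5)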
def ipo(self, page=None, pagesize=None, date_asof=None, date_from=None, date_to=None,
company_tickers=None, importance=None, date_sort=None, updated_params=None):
"""Public Method: Benzing IPO looks at initial public offering data for companies.
Arguments:
Optional:
page (int) - page offset
pagesize (int) - limit of results returned
date_asof (str) - "YYYY-MM-DD"
date_from (str) - "YYYY-MM-DD"
date_to (str) - "YYYY-MM-DD"
company_tickers (str)
importance - (int) - not tested yet.
date_sort - "str" - Dividend date field to sort on
updated_params (int64) - records last updated unix time stamp. Forces the
sort order to be greater or equal to the time stamp indicated.
Returns:
id, date, time, ticker, exchange, name, pricing_date, price_min, price_max, deal_status,
insider_lockup_days, insider_lockup_date, offering_value, offering_shares, lead_underwriters,
            underwriter_quiet_expiration_days, underwriter_quiet_expiration_date, updated"""
params = {
'token': self.token,
"page": page,
"pagesize": pagesize,
"parameters[date]": date_asof,
"parameters[date_from]": date_from,
"parameters[date_to]": date_to,
"parameters[tickers]": company_tickers,
"parameters[importance]": None,
"parameters[date_sort]": date_sort,
"parameters[updated]": updated_params
}
self.param_initiate.calendar_check(params)
try:
ipo_url = self.__url_call("calendar", "ipos")
ipo = requests_retry_session().get(ipo_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=ipo.url,
status_code=ipo.status_code)
if self.log:
log.info(statement)
self.__check_status(ipo.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
result_out = ipo.json() if importance is None else self.__importance("ipo", ipo.json(), importance)
return result_out
def ratings(self, page=None, pagesize=None, date_asof=None, date_from=None, date_to=None,
company_tickers=None, importance=None, date_sort=None, updated_params=None, action=None):
"""Public Method: Benzinga Ratings looks at ratings from different firms.
Arguments:
Optional:
page (int) - page offset
pagesize (int) - limit of results returned
date_asof (str) - "YYYY-MM-DD"
date_from (str) - "YYYY-MM-DD"
date_to (str) - "YYYY-MM-DD"
company_tickers (str)
importance - (int) - not tested yet.
            date_sort - (str) - Ratings date field to sort on
updated_params (int64) - records last updated unix time stamp. Forces the
sort order to be greater or equal to the time stamp indicated.
action - (str) - " Upgrades , Downgrades , Maintains , Lowers , Raises ,
Initiates Coverage On , Terminates Coverage On"
Returns:
id, date, time, ticker, exchange, name, action_pt, action_company, rating_current,
pt_current, rating_prior, pt_prior, url, importance, updated, url_calendar, url_news,
analyst, analyst_name"""
params = {
'token': self.token,
"page": page,
"pagesize": pagesize,
"parameters[date]": date_asof,
"parameters[date_from]": date_from,
"parameters[date_to]": date_to,
"parameters[tickers]": company_tickers,
"parameters[importance]": None,
"parameters[date_sort]": date_sort,
"parameters[updated]": updated_params,
"parameters[action]": action
}
self.param_initiate.calendar_check(params)
try:
ratings_url = self.__url_call("calendar", "ratings")
ratings = requests_retry_session().get(ratings_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=ratings.url,
status_code=ratings.status_code)
if self.log:
log.info(statement)
self.__check_status(ratings.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
        result_out = ratings.json() if importance is None or (not ratings.json()) else self.__importance(
            "ratings", ratings.json(), importance)
return result_out
def __importance(self, name, calendar_obj, importance):
new_list, revised_dict = list(filter(lambda x: x["importance"] == importance, calendar_obj[name])), {}
revised_dict[name] = new_list
return revised_dict
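    # Example of the client-side filter above (payload hypothetical):
    #   calendar_obj = {"ratings": [{"importance": 5}, {"importance": 2}]}
    #   self.__importance("ratings", calendar_obj, 5)
    #   # -> {"ratings": [{"importance": 5}]}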
def conference_calls(self, page=None, pagesize=None, date_asof=None, date_from=None, date_to=None,
company_tickers=None, importance=None, date_sort=None, updated_params=None):
"""Public Method: Benzinga Conference calls looks at conference calls.
Arguments:
Optional:
page (int) - page offset
pagesize (int) - limit of results returned
date_asof (str) - "YYYY-MM-DD"
date_from (str) - "YYYY-MM-DD"
date_to (str) - "YYYY-MM-DD"
company_tickers (str)
importance - (int) - not tested yet.
date_sort - "str" - Dividend date field to sort on
updated_params (int64) - records last updated unix time stamp. Forces the
sort order to be greater or equal to the time stamp indicated.
Returns:
id, date, time, ticker, exchange, name, start_time, phone_num, international_line,
            reservation_num, access_code, webcast_url, importance, updated"""
params = {
'token': self.token,
"page": page,
"pagesize": pagesize,
"parameters[date]": date_asof,
"parameters[date_from]": date_from,
"parameters[date_to]": date_to,
"parameters[tickers]": company_tickers,
"parameters[importance]": None,
"parameters[date_sort]": date_sort,
"parameters[updated]": updated_params
}
self.param_initiate.calendar_check(params)
try:
conference_url = self.__url_call("calendar", "conference-calls")
conference = requests_retry_session().get(conference_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=conference.url,
status_code=conference.status_code)
if self.log:
log.info(statement)
self.__check_status(conference.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
        result_out = conference.json() if importance is None else self.__importance(
            "conference", conference.json(), importance)
return result_out
def fundamentals(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Fundamentals looks at overall financial data for a company.
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) "YYYY-MM-DD"
Returns:
            company, companyProfile, shareClass, earningReports, financialStatements, operation, earning and valuation
ratios, alphaBeta
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof
}
self.param_initiate.fundamentals_check(params)
try:
financials_url = self.__url_call("fundamentals")
financials = requests_retry_session().get(financials_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=financials.url,
status_code=financials.status_code)
if self.log:
log.info(statement)
self.__check_status(financials.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return financials.json()
def financials(self, company_tickers, date_asof=None, period=None, reporttype=None):
"""Public Method: Benzinga Financials looks at overall financial data like for a company.
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
period (str) - select from (3M , 6M , 9M , 12M , 1Y)
reporttype (str) - select from (TTM, A (default), R,P)
Returns:
company, financials such as balance sheet information, assets and liabilities
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof,
"period": period,
"reportType": reporttype
}
self.param_initiate.fundamentals_check(params)
try:
financials_url = self.__url_call("fundamentals", "financials")
financials = requests_retry_session().get(financials_url, headers=self.headers,params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=financials.url,
status_code=financials.status_code)
if self.log:
log.info(statement)
self.__check_status(financials.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return financials.json()
def valuation_ratios(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Valuation Ratios looks at overall financial data like for a company.
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
Returns:
different attributes of the valuation ratios
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof
}
self.param_initiate.fundamentals_check(params)
try:
valuation_url = self.__url_call("fundamentals", "valuationRatios")
valuation = requests_retry_session().get(valuation_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=valuation.url,
status_code=valuation.status_code)
if self.log:
log.info(statement)
self.__check_status(valuation.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return valuation.json()
def earning_ratios(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Earning Ratios
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
Returns:
different attributes of the earning ratios
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof
}
self.param_initiate.fundamentals_check(params)
try:
earnings_url = self.__url_call("fundamentals", "earningRatios")
earnings = requests_retry_session().get(earnings_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=earnings.url,
status_code=earnings.status_code)
if self.log:
log.info(statement)
self.__check_status(earnings.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return earnings.json()
def operation_ratios(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Operation Ratios
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
Returns:
different attributes of the operation ratios
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof
}
self.param_initiate.fundamentals_check(params)
try:
operations_url = self.__url_call("fundamentals", "operationRatios")
operations = requests_retry_session().get(operations_url, headers=self.headers, params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=operations.url,
status_code=operations.status_code)
if self.log:
log.info(statement)
self.__check_status(operations.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return operations.json()
def share_class(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Share Class
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
Returns:
different attributes of the share class.
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": date_asof
}
self.param_initiate.fundamentals_check(params)
try:
shareclass_url = self.__url_call("fundamentals", "shareClass")
shareclass = requests_retry_session().get(shareclass_url, headers=self.headers,params=params, timeout=10)
statement = "Status Code: {status_code} Endpoint: {endpoint}".format(endpoint=shareclass.url,
status_code=shareclass.status_code)
if self.log:
log.info(statement)
self.__check_status(shareclass.status_code)
except requests.exceptions.RequestException as err:
self.__check_status(err.response.status_code)
return shareclass.json()
def earning_reports(self, company_tickers, date_asof=None):
"""Public Method: Benzinga Earning Reports looks at overall earning reports for a company.
Arguments:
Required - company_tickers (str)
Optional:
date_asof (str) - "YYYY-MM-DD"
Returns:
different attributes of the earning reports.
"""
params = {
'apikey': self.token,
"symbols": company_tickers,
"asOf": | |
hist.target_number = ca.target_number
hist.target_completed = ca.target_completed
hist.updated_date = datetime.now()
hist.save()
ca.time_interval = ProjectTimeInterval.objects.get(id=int(check[1]))
ca.interval_updated = True
if longitude != '':
ca.location = Point(
round(float(longitude), 6),
round(float(ca.latitude.real if ca.latitude else "27.7172"), 6),
srid=4326)
if latitude != '':
ca.location = Point(
round(float(ca.longitude.real if ca.longitude else "85.3240"), 6),
round(float(latitude), 6),
srid=4326)
ca.save()
else:
longitude = ''
latitude = ''
val = 'lat_' + item
if check[0] == val:
latitude = check[1]
val = 'long_' + item
if check[0] == val:
longitude = check[1]
val = 'target_' + item
if check[0] == val:
if ',' in check[1]:
value = check[1].replace(',', '')
ca.target_number = int(value)
else:
ca.target_number = int(check[1])
val = 'interval_' + item
if check[0] == val:
ca.time_interval = ProjectTimeInterval.objects.get(id=int(check[1]))
if longitude != '':
ca.location = Point(
round(float(longitude), 6),
round(float(ca.latitude.real if ca.latitude else "27.7172"), 6),
srid=4326)
if latitude != '':
ca.location = Point(
round(float(ca.longitude.real if ca.longitude else "85.3240"), 6),
round(float(latitude), 6),
srid=4326)
ca.save()
else:
for check in checked:
if not created:
val = 'interval_' + item
if check[0] == val:
if not ca.time_interval == ProjectTimeInterval.objects.get(id=int(check[1])):
ClusterAHistory.objects.get_or_create(
clustera=ca, time_interval=ca.time_interval, updated_date=datetime.now())
ca.time_interval = ProjectTimeInterval.objects.get(id=int(check[1]))
ca.interval_updated = True
ca.save()
else:
val = 'interval_' + item
if check[0] == val:
ca.time_interval = ProjectTimeInterval.objects.get(id=int(check[1]))
ca.save()
return redirect(reverse_lazy('cluster_list'))
class BeneficiaryListView(ManagerMixin, ListView):
model = Beneficiary
template_name = 'core/beneficiary-list.html'
def get_queryset(self, *args, **kwargs):
if self.request.is_super_admin:
return self.model.objects.all()
else:
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
return self.model.objects.filter(cluster__project=project)
class BeneficiaryCreateView(ManagerMixin, CreateView):
model = Beneficiary
template_name = 'core/beneficiary-form.html'
form_class = BeneficiaryForm
success_url = reverse_lazy('beneficiary_list')
def get_form_kwargs(self):
kwargs = super(BeneficiaryCreateView, self).get_form_kwargs()
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
kwargs['project'] = project
kwargs['is_super_admin'] = self.request.is_super_admin
return kwargs
class BeneficiaryDetailView(ManagerMixin, DetailView):
model = Beneficiary
template_name = 'core/beneficiary-detail.html'
class BeneficiaryUpdateView(ManagerMixin, UpdateView):
model = Beneficiary
template_name = 'core/beneficiary-form.html'
form_class = BeneficiaryForm
success_url = reverse_lazy('beneficiary_list')
def get_form_kwargs(self):
kwargs = super(BeneficiaryUpdateView, self).get_form_kwargs()
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
kwargs['project'] = project
kwargs['is_super_admin'] = self.request.is_super_admin
return kwargs
class BeneficiaryDeleteView(ManagerMixin, DeleteView):
model = Beneficiary
template_name = 'core/beneficiary-delete.html'
success_url = reverse_lazy('beneficiary_list')
# change this view if new excel sheets are to be uploaded
# better try matching the headers in the excel sheet
class BeneficiaryUploadView(ManagerMixin, View):
template_name = 'core/beneficiary-upload.html'
def post(self, request):
try:
filename = request.FILES['inputFile']
df = pd.read_excel(filename).fillna(value='')
total = df['Name '].count()
for row in range(0, total):
if 'Project' in df:
project = Project.objects.get(id=df['Project'][row])
else:
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
district, created = District.objects.get_or_create(name=df['District '][row])
municipality, created = Municipality.objects.get_or_create(
district=district, name=df['Municipal'][row])
cluster, created = Cluster.objects.get_or_create(
name=df['Cluster'][row],
project=project)
Beneficiary.objects.create(
name=df['Name '][row],
ward_no=df['Ward'][row],
cluster=cluster,
Type=df['Category'][row],
vulnerabilityType=df['Vulnerability Type'][row],
GovernmentTranch=df['Government Tranch Received'][row],
ConstructionPhase=df['House Construction Progress (as per 15 steps)'][row],
Typesofhouse=df['House Type (CSEB, Brick, Stone)'][row],
district=district,
municipality=municipality
)
# Beneficiary.objects.filter(name=df['Name'][row]).update(district=district, municipality=municipality)
return HttpResponseRedirect('/core/beneficiary-list')
except Exception as e:
print(e)
messages.error(request, "Beneficiary upload failed. Unsupported format, or corrupt file.")
return HttpResponseRedirect('/core/beneficiary-upload')
def get(self, request):
return render(request, self.template_name)
class UserRoleListView(ManagerMixin, ListView):
model = UserRole
template_name = 'core/userrole-list.html'
def get_queryset(self, *args, **kwargs):
if self.request.is_super_admin:
return self.model.objects.all()
else:
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
return self.model.objects.filter(project=project)
# class UserRoleCreateView(ManagerMixin, CreateView):
# model = UserRole
# template_name = 'core/userrole-form.html'
# form_class = UserRoleForm
# success_url = reverse_lazy('userrole_list')
class UserRoleCreateView(ManagerMixin, View):
def get(self, request, **kwargs):
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
form = UserRoleForm(project=project, is_super_admin=self.request.is_super_admin)
return render(request, 'core/userrole-form.html', {'form': form})
def post(self, request, **kwargs):
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
form = UserRoleForm(request.POST, project=project, is_super_admin=self.request.is_super_admin)
if form.is_valid():
obj = form.save(commit=False)
obj.save()
if obj.group.name == 'project-manager':
clusters = Cluster.objects.filter(project=obj.project)
for cluster in clusters:
obj.cluster.add(cluster)
else:
clusters = form.cleaned_data.get('cluster')
for cluster in clusters:
obj.cluster.add(cluster)
# send email to the user
if obj.user.email:
clusters = obj.cluster.all()
to_email = obj.user.email
mail_subject = 'User role assigned.'
message = render_to_string('core/user_role_email.html', {
'userrole': obj,
'clusters': clusters,
'domain': settings.SITE_URL,
})
email = EmailMessage(
mail_subject, message, to=[to_email]
)
email.send()
else:
pass
return HttpResponseRedirect(reverse('userrole_list'))
return render(request, 'core/userrole-form.html', {'form':form})
class UserRoleUpdateView(ManagerMixin, UpdateView):
model = UserRole
template_name = 'core/userrole-form.html'
form_class = UserRoleForm
success_url = reverse_lazy('userrole_list')
def get_form_kwargs(self):
kwargs = super(UserRoleUpdateView, self).get_form_kwargs()
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
            project = self.request.project
kwargs['project'] = project
kwargs['is_super_admin'] = self.request.is_super_admin
return kwargs
class UserRoleDetailView(ManagerMixin, DetailView):
model = UserRole
template_name = 'core/userrole-detail.html'
class UserRoleDeleteView(ManagerMixin, DeleteView):
model = UserRole
template_name = 'core/userrole-delete.html'
success_url = reverse_lazy('userrole_list')
class SubmissionView(LoginRequiredMixin, View):
def get(self, request, **kwargs):
pk = kwargs.get('pk')
cluster_activity_group = ClusterAG.objects.filter(cluster_id=pk)
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
time_interval = ProjectTimeInterval.objects.filter(project=project)
return render(request, 'core/submission.html', {
'cluster_activity_groups': cluster_activity_group,
'pk': pk,
'interval': time_interval
})
class SubmissionListView(LoginRequiredMixin, View):
def get(self, request, **kwargs):
cluster_activity = ClusterA.objects.get(pk=kwargs.get('pk'))
submissions = Submission.objects.filter(cluster_activity=cluster_activity)
return render(request, 'core/submission_list.html', {'submissions': submissions, 'activity': cluster_activity})
def post(self, request, **kwargs):
if 'project_id' in self.request.session:
project = Project.objects.get(id=self.request.session['project_id'])
else:
project = self.request.project
aggregations_list = ActivityAggregate.objects.filter(project=project)
if 'approve' in request.POST:
if ',' in request.POST.get('approve'):
sub_id = request.POST.get('approve').replace(',', '')
else:
sub_id = request.POST.get('approve')
submission = Submission.objects.get(pk=sub_id)
submission.status = 'approved'
submission.save()
created = create_db_table(submission)
if created == 'error':
return render(request, 'core/submission_notification.html', {'msg': 'Submission approved but table not created as a beneficiary creation submission has already been approved.'})
if not created:
filled = fill_cseb_table(submission)
if not filled:
if aggregations_list:
for aggregations in aggregations_list:
aggregation_questions = aggregations.aggregation_fields
aggregation_answer = aggregations.aggregation_fields_value
answer_dict = {}
if aggregation_answer == {}:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
if key in submission.instance.json:
answer_dict[value] = submission.instance.json[key]
aggregations.aggregation_fields_value = answer_dict
aggregations.save()
else:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
if key in submission.instance.json:
if value in aggregation_answer:
previous_answer = aggregation_answer.get(value, '0')
aggregation_answer[value] = str(int(submission.instance.json[key]) + int(previous_answer))
else:
aggregation_answer[value] = submission.instance.json[key]
ActivityAggregateHistory.objects.create(aggregation=aggregations, aggregation_values=aggregations.aggregation_fields_value, date=datetime.now())
aggregations.aggregation_fields_value = aggregation_answer
aggregations.save()
order = submission.cluster_activity.activity.order
if order:
Submission.objects.filter(cluster_activity__activity__order__lte=order, beneficiary__cluster__project=project).update(status='approved')
elif 'reject' in request.POST:
if ',' in request.POST.get('reject'):
sub_id = request.POST.get('reject').replace(',', '')
else:
sub_id = request.POST.get('reject')
submission = Submission.objects.get(pk=sub_id)
submission.status = 'rejected'
submission.save()
if submission.instance.user:
to_email = submission.instance.user.email
mail_subject = 'Submission Rejected.'
message = render_to_string('core/submission_reject_email.html', {
'submission': submission.instance,
'rejected_by': request.user.username,
'activity': submission.cluster_activity.activity.name,
'cluster': submission.cluster_activity.cag.cluster.name,
'date': datetime.now(),
})
email = EmailMessage(
mail_subject, message, to=[to_email]
)
email.send()
elif 'approve-all' in request.POST:
Submission.objects.filter(cluster_activity__cag__cluster__project=project, status='pending').update(status='approved')
submissions = Submission.objects.filter(cluster_activity__cag__cluster__project=project, status="approved")
for submission in submissions:
created = create_db_table(submission)
if created == 'error':
return render(request, 'core/submission_notification.html', {'msg': 'Submission approved but table not created as a beneficiary creation submission has already been approved.'})
if not created:
filled = fill_cseb_table(submission)
if aggregations_list:
for aggregations in aggregations_list:
aggregation_questions = aggregations.aggregation_fields
aggregation_answer = aggregations.aggregation_fields_value
answer_dict = {}
if aggregation_answer == {}:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
for instance in submissions:
if key in instance.instance.json:
answer_dict[value] = instance.instance.json[key]
aggregations.aggregation_fields_value = answer_dict
aggregations.save()
else:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
for instance in submissions:
created = create_db_table(instance)
if not created:
if key in instance.instance.json:
if value in aggregation_answer:
previous_answer = aggregation_answer.get(value, '0')
aggregation_answer[value] = str(int(instance.instance.json[key]) + int(previous_answer))
else:
                                                aggregation_answer[value] = instance.instance.json[key]
ActivityAggregateHistory.objects.create(aggregation=aggregations, aggregation_values=aggregations.aggregation_fields_value, date=datetime.now())
aggregations.aggregation_fields_value = aggregation_answer
aggregations.save()
elif 'approve-selected' in request.POST:
checked = request.POST.getlist('checked[]')
if checked:
for item in checked:
if ',' in item:
sub_id = item.replace(',', '')
else:
sub_id = item
submission = Submission.objects.get(id=int(sub_id))
submission.status = 'approved'
submission.save()
created = create_db_table(submission)
if created == 'error':
return render(request, 'core/submission_notification.html', {'msg': 'Submission approved but table not created as a beneficiary creation submission has already been approved.'})
if not created:
filled = fill_cseb_table(submission)
if not filled:
if aggregations_list:
for aggregations in aggregations_list:
aggregation_questions = aggregations.aggregation_fields
aggregation_answer = aggregations.aggregation_fields_value
answer_dict = {}
if aggregation_answer == {}:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
if key in submission.instance.json:
answer_dict[value] = submission.instance.json[key]
aggregations.aggregation_fields_value = answer_dict
aggregations.save()
else:
for item in aggregation_questions:
for name, attributes in item.items():
for key, value in attributes.items():
if key in submission.instance.json:
if value in aggregation_answer:
previous_answer = aggregation_answer.get(value, '0')
aggregation_answer[value] = str(int(submission.instance.json[key]) + int(previous_answer))
else:
aggregation_answer[value] = submission.instance.json[key]
ActivityAggregateHistory.objects.create(aggregation=aggregations, aggregation_values=aggregations.aggregation_fields_value, date=datetime.now())
aggregations.aggregation_fields_value = aggregation_answer
aggregations.save()
order = submission.cluster_activity.activity.order
if order:
Submission.objects.filter(cluster_activity__cag__cluster__project=project, cluster_activity__activity__order__lte=order).update(status='approved')
# submissions = Submission.objects.filter(cluster_activity__activity__order__lte=order, status="approved").exclude(id=submission.id)
" "
rval = f"\n {space*current_indent}/* INVOKE generated for {kernel_name} */\n"
if kernel_type == kernels.perpart_kernel_wrapper:
rval = rval + self.gen_invoke_perpart(kernel_name, current_indent, indent, kernel_type)
else:
raise NotImplementedError("gen_invoke not yet implemented")
rval = rval + f"{space*current_indent}/* End of INVOKE generated for {kernel_name} */\n\n"
return rval
def get_particle_access(self, index, field):
'''
Returns the code to access a particle of the given
index for the field provided.
:param str index: The index value to access.
:param str field: The field name to access.
TODO: We could check the field exists
'''
# Remove any extra " from the field from passing through the DSL
# FIXME
field = field.replace('"', '')
        # Strip any array indices from the field name and collect them as
        # extra indices for the access call.
        extra_indices = ""
        arrays = re.findall(r"\[[0-9*]*\]", field)
        if arrays:
for ind in arrays:
field = field.replace(ind, "")
ind = ind.replace("[", "")
ind = ind.replace("]", "")
extra_indices = extra_indices + ", " + ind
        # Creating a variable access is probably the only sane way to do this.
self._pairwise_visitor.addSlice(field)
self._per_part_visitor.addSlice(field)
self._main_visitor.addSlice(field)
assert index == "part1" # FIXME Not handling part2 accesses for pairwise yet.
return "_" + field + ".access(i, a" + extra_indices + ")"
def _get_particle_position_internal(self, dimension):
'''
Returns the index corresponding to a dimension accessed.
For Cabana, x -> 0, y-> 1, z->2
'''
if dimension == "x":
return "0"
if dimension == "y":
return "1"
if dimension == "z":
return "2"
raise InvalidNameError("The dimension argument should be x, y, or z")
def get_particle_position(self, dimension):
'''
Returns the code to access a particle's position
        for each dimension. Dimensions are x/y/z. For Cabana
        the positions are stored in a 3-element array, so we return
        the relevant array element.
:param str dimension: The dimension ("x", "y" or "z") to access
:raises InvalidNameError: If the dimension argument is not
"x", "y" or "z".
:returns: The string to access a particle's position variable.
:rtype: str
'''
# FIXME
if dimension == "x":
return "core_part_position[0]"
if dimension == "y":
return "core_part_position[1]"
if dimension == "z":
return "core_part_position[2]"
raise InvalidNameError("The dimension argument should be x, y, or z")
def get_pointer(self, var_code, *args, **kwargs):
'''
Returns the code to access the pointer to the supplied var_code.
The var_code should already be Cabana C++ code.
:param str var_code: The Cabana C++ code to take pointer from.
:returns: The string pointer to the supplied var_code.
:rtype: str
'''
return "&(" + var_code + ")"
def set_cutoff(self, cutoff, var_type=CONSTANT):
'''
Set the cutoff for pairwise interactions. NYI
:raises: NotImplementedError
'''
raise NotImplementedError("Cabana backend doesn't yet support pairwise interactions")
def initialisation_code(self, particle_count, filename):
return self._input_module.call_input_cabana(particle_count, filename)
def gen_particle(self, particle):
# Store the particle for later
self._particle = particle
# I have performance concerns about putting a struct inside the AoSoA
# but it may be ok, we will see.
# Output the core part type
output = ""
output = output + "struct core_part_type{\n"
output = output + " double position[3];\n"
output = output + " double velocity[3];\n"
output = output + "};\n\n"
#Output the neighbour part type
output = output + "struct neighbour_part_type{\n"
output = output + " double cutoff;\n"
output = output + "};\n\n"
particle.add_element("core_part_position", "double[3]")
particle.add_element("core_part_velocity", "double[3]")
particle.add_element("neighbour_part_cutoff", "double")
# Sort particle by size of element
sizes = []
for element in particle.particle_type:
if element == "core_part" or element == "neighbour_part":
sizes.append(0)
continue
c_type = particle.particle_type[element]['type']
is_array = particle.particle_type[element]['is_array']
if is_array:
x = c_type.index("[")
base_type = c_type[0:x]
count = 1
array_str = c_type[x:]
while array_str.find("[") >= 0:
val = int(array_str[array_str.index("[")+1:array_str.index("]")])
count = count * val
array_str = array_str[array_str.index("]")+1:]
size = Cabana._type_sizes[base_type]
sizes.append(size * count)
else:
size = Cabana._type_sizes[c_type]
sizes.append(size)
fields = particle.particle_type.keys()
sorted_fields = [x for _, x in sorted(zip(sizes, fields), reverse=True)]
output = output + "enum FieldNames{"
first = True
for key in sorted_fields:
if key != "core_part" and key != "neighbour_part":
if first:
output = output + f"{key} = 0"
first = False
else:
output = output + f",\n {key}"
output = output + "\n };\n"
output = output + "using DataTypes = Cabana::MemberTypes<"
first = True
for key in sorted_fields:
if key != "core_part" and key != "neighbour_part":
if first:
c_type = particle.particle_type[key]['type']
output = output + c_type
first = False
else:
c_type = particle.particle_type[key]['type']
output = output + ",\n " + c_type
output = output + ">;\n"
return output
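    # Hedged sketch of the generated output for a particle with a single
    # user-defined double field `mass` (member order depends on the
    # size-based sort above, so it is illustrative only):
    #   struct core_part_type{ double position[3]; double velocity[3]; };
    #   struct neighbour_part_type{ double cutoff; };
    #   enum FieldNames{core_part_position = 0, core_part_velocity,
    #                   neighbour_part_cutoff, mass };
    #   using DataTypes = Cabana::MemberTypes<double[3], double[3], double, double>;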
def gen_config(self, config):
# FIXME
output = ""
output = output + "struct boundary{\n"
output = output + " double x_min, x_max;\n"
output = output + " double y_min, y_max;\n"
output = output + " double z_min, z_max;\n"
output = output + "};\n\n"
output = output + "struct space_type{\n"
output = output + " boundary box_dims;\n"
output = output + " int nparts;\n"
output = output + "};\n\n"
        # Currently empty neighbour config
output = output + "struct neighbour_config_type{\n"
output = output + "};\n\n"
output = output + "struct config_view_type{\n"
# output = output + " space_type space;\n"
# output = output + " neighbour_config_type neighbour_config;\n"
for key in config.config_type:
is_array = config.config_type[key]['is_array']
c_type = config.config_type[key]['type']
varnam = key
if is_array:
# Move array index to the correct position for C++ definitions
x = c_type.index("[")
varnam = varnam + c_type[x:]
c_type = c_type[0:x]
output = output + f" {c_type} {varnam};\n"
output = output + "};\n\n"
output = output + "using config_struct_type = Kokkos::View<struct config_view_type*, MemorySpace>;\n"
output = output + "using config_struct_host = config_struct_type::HostMirror;\n"
output = output + "struct config_type{\n"
output = output + " config_struct_type config;\n"
output = output + " config_struct_host config_host;\n"
output = output + "};\n"
# There are some complex things to work out here. How do we add other Kokkos views into the config_type struct
# We probably need to do some extra type analysis in the config_type object.
return output
def cleanup(self, current_indent, *args, **kwargs):
rval = "}\n"
return rval
def initialise(self,particle_count, filename, current_indent, **kwargs):
# FIXME
space = " "
rval = space*current_indent + "Kokkos::ScopeGuard scope_guard(argc, argv);\n"
rval = rval + "{\n"
rval = rval + space*current_indent + "config_type config;\n"
rval = rval + space*current_indent + "config.config = config_struct_type(\"config\", 1);\n"
rval = rval + space*current_indent + "config.config_host = Kokkos::create_mirror_view(config.config);\n"
rval = rval + space*current_indent + f"{self._input_module.call_input_cabana(particle_count, filename, current_indent=current_indent)}\n"
rval = rval + space*current_indent + "Cabana::SimdPolicy<VectorLength, ExecutionSpace> simd_policy( 0, particle_aosoa.size());\n"
self.variable_scope = variable_scope()
for struct in self._structures.keys():
# Add this to the variable scope with a special c_type.
self.variable_scope.add_variable(struct, self._structures[struct], False)
rval = rval + space*current_indent + self._structures[struct] + " " + struct + ";\n"
# Need to do something with each kernel now.
# rval = rval + space*current_indent + "auto core_part_slice = Cabana::slice<core_part_space>(particle_aosoa);\n"
# rval = rval + space*current_indent + "auto neighbour_part_slice = Cabana::slice<neighbour_part_space>(particle_aosoa);\n"
# We need the particle type to be able to initialise correctly
for key in self._particle.particle_type:
if key != "core_part" and key != "neighbour_part":
rval = rval + space*current_indent + f"auto {key}_slice = Cabana::slice<{key}>" + "(particle_aosoa);\n"
# Generate the functors
for key in self._kernel_slices.keys():
rval = rval + space*current_indent + f"{key}_functor"
slice_names = []
for slices in self._kernel_slices[key]:
slice_names.append(f"decltype({slices}_slice)")
if len(slice_names) > 0:
rval = rval + "<"
rval = rval + ", ".join(slice_names) + ">"
rval = rval + f" {key}("
slice_names = []
for slices in self._kernel_slices[key]:
slice_names.append(f"{slices}_slice")
rval = rval + ", ".join(slice_names) + ", config.config"
slice_names = []
for struct in self._structures.keys():
slice_names.append(struct)
if len(slice_names) > 0:
rval = rval + ","
rval = rval + ", ".join(slice_names) + ");\n"
return rval
def create_variable(self, c_type, name, initial_value=None, **kwargs):
current_indent = kwargs.get("current_indent", 0)
if Cabana._type_map.get(c_type) is None:
raise UnsupportedTypeError("Cabana does not support type \"{0}\""
" in created variables.".format(c_type))
##Check name is allowed in C++
name = name.replace('"', '')
a = re.match("[a-zA-Z_][a-zA-Z_0-9]*", name)
if a is None or a.group(0) != name:
raise InvalidNameError("Cabana does not support \"{0}\" as a name"
" for variables.".format(name))
end = ";\n"
if initial_value is not None:
end = f" | |
### ASYMMETRON ###
import itertools
import numpy as np
import warnings
from scipy.stats import binom_test
try:
    from pybedtools import BedTool
except ImportError:
    print("Pybedtools not imported")
try:
    import visualizations
except ImportError:
    print("visualizations not imported")
def pairs_generator(pathL1, pathL2, NamesL1, NamesL2):
"""
:param pathL1: list of paths
:param pathL2: list of paths
:param NamesL1: list of names, corresponding to pathsL1
:param NamesL2: list of names corresponding to pathsL2
:return: tuple of two lists. List 1 consists of all possible combinations of (pathsL1, pathsL2). List 2 consists
of the corresponding combinations of names
"""
return list(itertools.product(pathL1, pathL2)), list(itertools.product(NamesL1, NamesL2))
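def _demo_pairs_generator():
    # Minimal sanity sketch (not part of the original module): Cartesian
    # pairing of two path lists and the matching name lists.
    paths, names = pairs_generator(["a.bed"], ["b.bed", "c.bed"], ["A"], ["B", "C"])
    assert paths == [("a.bed", "b.bed"), ("a.bed", "c.bed")]
    assert names == [("A", "B"), ("A", "C")]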
def read_BED(path, last_col=False):
    """
    This function reads BED files.
    last_col=True: an extra last column holds a score (e.g. replication timing
    or gene expression); the function then returns a list of lists with the BED
    file information together with the list of scores. Otherwise it returns
    only the list of lists.
    """
    Data = []
    if not last_col:
        with open(path) as f:
            for line in f:
                Data.append(line.strip().split()[:6])
        return Data
    Score = []
    with open(path) as f:
        for line in f:
            fields = line.strip().split()
            Data.append(fields[:6])
            Score.append(float(fields[-1]))
    return Data, Score
def binner(min_size, max_size, bin_no):
"""
:param min_size lower bound of the interval to divide into bins
:param max_size upper bound of the interval to divide into bins
:param bin_no the interval will be divided into that many bins
:returns list of tuples, representing the lower and upper limits of each subinterval after dividing into bins
This function separates the input interval into bins
"""
bin_size = float(max_size - min_size) / float(bin_no)
Bins = [(min_size + bin_size * k, min_size + bin_size * (k + 1)) for k in range(0, bin_no)]
return Bins
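def _demo_binner():
    # Minimal sanity sketch (not part of the original module): dividing
    # [0, 100] into 4 bins of width 25.
    assert binner(0, 100, 4) == [(0.0, 25.0), (25.0, 50.0), (50.0, 75.0), (75.0, 100.0)]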
def separate_on_score(path_score, path, number_of_bins, number_of_files, expected_asym, expected_asym_c_d):
"""
path_score: is the path to the BED file with last column containing the scores.
path: the path to the second BED file.
number_of_bins: number of groups to generate based on the score min and max values and in which to perform the strand asymmetry analysis.
This would be useful e.g. if we want to see if expression levels are associated with mutational strand asymmetry or if replication timing is.
returns the strand asymmetry scores for same / opposite and convergent / divergent for each bin as two lists.
"""
# We should check Score to be integer / float in our checks
DataL, ScoreL = read_BED(path_score, last_col=True)
DataL2 = read_BED(path, last_col=False)
StepsL = binner(min(ScoreL), max(ScoreL), number_of_bins)
# Separates the DataL based on the ScoreL bins intro groups.
DataStepsL = []
ScoresStepsL = []
for step in StepsL:
DataStep = []
ScoreStep = []
for i in range(len(ScoreL)):
if step[0] <= ScoreL[i] <= step[1]:
DataStep += [DataL[i]]
ScoreStep += [ScoreL[i]]
DataStepsL += [DataStep]
ScoresStepsL += [ScoreStep]
# Calculates the asymmetry for same / opposite and convergent / divergent asymmetry for each bin.
Ratio_Same_Opposite = []
Ratio_Convergent_Divergent = []
Binom_Test_Same_Opposite = []
Binom_Test_Same_Opposite_Bonferoni = []
Binom_Test_Convergent_Divergent = []
Binom_Test_Convergent_Divergent_Bonferoni = []
for step in range(len(StepsL)):
p_p_step, m_m_step, p_m_step, m_p_step, same_strand_step, opposite_strand_step, convergent_step, divergent_step = overlap(
DataStepsL[step], DataL2)
if same_strand_step + opposite_strand_step != 0:
Ratio_Same_Opposite.append(same_strand_step / float(same_strand_step + opposite_strand_step))
else:
Ratio_Same_Opposite.append(0.5)
if p_m_step + m_p_step != 0:
Ratio_Convergent_Divergent.append(p_m_step / float(p_m_step + m_p_step))
binom_same_opposite = binom_test(same_strand_step, same_strand_step + opposite_strand_step, expected_asym)
binom_same_opposite_Bonferoni = min(1, binom_same_opposite * number_of_files)
binom_convergent_divergent = binom_test(p_m_step, p_m_step + m_p_step, expected_asym_c_d)
binom_convergent_divergent_Bonferoni = min(1, binom_convergent_divergent * number_of_files)
        Binom_Test_Same_Opposite.append(binom_same_opposite)
        Binom_Test_Same_Opposite_Bonferoni.append(binom_same_opposite_Bonferoni)
        Binom_Test_Convergent_Divergent.append(binom_convergent_divergent)
        Binom_Test_Convergent_Divergent_Bonferoni.append(binom_convergent_divergent_Bonferoni)
return Ratio_Same_Opposite, Ratio_Convergent_Divergent, StepsL, Binom_Test_Same_Opposite, Binom_Test_Same_Opposite_Bonferoni, Binom_Test_Convergent_Divergent, Binom_Test_Convergent_Divergent_Bonferoni
def strand_annotate_third_BED_overlap(unnotated_path, annotated_path):
"""
    For a third file that doesn't carry its own strand annotation (e.g. mutation
    files, since mutations occur on both strands), this function derives a strand
    annotation from an annotated file, for overlapping instances.
    Returns the unannotated intervals with strand annotation added.
"""
DataL_unnotated = BedTool(unnotated_path)
DataL_annotated = read_BED(annotated_path)
Overlap_strand = DataL_unnotated.intersect(DataL_annotated, wao=True)
Overlap_strand_df = Overlap_strand.to_dataframe()
Chromosome = list(Overlap_strand_df.iloc[:, 0])
Start = list(Overlap_strand_df.iloc[:, 1])
End = list(Overlap_strand_df.iloc[:, 2])
ID = list(Overlap_strand_df.iloc[:, 3])
Strand = list(Overlap_strand_df.iloc[:, -2])
Start_Annotated = list(Overlap_strand_df.iloc[:, -6])
End_Annotated = list(Overlap_strand_df.iloc[:, -5])
Chromosome, Start, End, ID, Strand, Start_Annotated, End_Annotated = zip(
*((chrom, start, end, id_used, strand, start_annot, end_annot) for chrom, start, end, id_used, strand, start_annot, end_annot in
zip(Chromosome, Start, End, ID, Strand, Start_Annotated, End_Annotated) if strand in ["+", "-"]))
DataL = []
for i in range(len(Chromosome)):
        if DataL and Chromosome[i] == DataL[-1][0] and Start[i] == DataL[-1][1] and End[i] == DataL[-1][2]:
center_previous = (DataL[-1][1] + DataL[-1][2])/2
center_current = (Start[i]+End[i])/2
center_annotated = (Start_Annotated[i] + End_Annotated[i]) / 2
if abs(center_annotated - center_previous) > abs(center_annotated - center_current):
DataL[-1] = [Chromosome[i], Start[i], End[i], ID[i], ".", Strand[i]]
else:
pass
else:
DataL.append([Chromosome[i], Start[i], End[i], ID[i], ".", Strand[i]])
return DataL
def overlap(path1, path2):
"""
This is the main function of contained_asymmetries.py
Takes as inputs the paths to the two BED files to compare.
Uses pybedtools intersect function to find overlapping coordinates between regions and motifs.
Returns the number of occurrences in each orientation for Regions:BED path1 to Motifs:BED path2.
"""
DataL1 = BedTool(path1).sort()
DataL2 = BedTool(path2).sort()
overlap = DataL1.intersect(DataL2, wao=True)
Overlap_df = overlap.to_dataframe()
Strand1 = list(Overlap_df.iloc[:, 5])
Strand2 = list(Overlap_df.iloc[:, 11])
p_p, m_m, p_m, m_p, same_strand, opposite_strand, convergent, divergent = orientation(Strand1, Strand2)
return p_p, m_m, p_m, m_p, same_strand, opposite_strand, convergent, divergent
def proximal(path1, path2, window_min, window_max, upstream=False, downstream=False, bins=None):
"""
This is the main function of pairwise_asymmetries.py
    Uses the pybedtools closest function to find proximal coordinates,
    then calculates asymmetry through the orientation function for proximal pairs.
    The closest flags used are documented at https://bedtools.readthedocs.io/en/latest/content/tools/closest.html
    If bins is not None, the orientation counts are additionally computed per distance bin.
"""
# Finds the occurrences within the proximity limits and saves their pairwise orientation.
DataL1 = BedTool(path1).sort()
DataL2 = BedTool(path2).sort()
    if upstream and downstream:
closest = DataL1.closest(DataL2, D='ref')
elif upstream is True:
closest = DataL1.closest(DataL2, D='ref', id=False, iu=True)
elif downstream is True:
closest = DataL1.closest(DataL2, D='ref', iu=False, id=True)
else:
closest = DataL1.closest(DataL2, D='ref')
closest_df = closest.to_dataframe()
Strand1_init = list(closest_df.iloc[:, 5])
Strand2_init = list(closest_df.iloc[:, 11])
    Distance_init = list(closest_df.iloc[:, -1])
Distance1_temp, Strand1, Strand2 = zip(
*((dist, strand1, strand2) for dist, strand1, strand2 in zip(Distance_init, Strand1_init, Strand2_init) if
abs(dist) <= window_max and abs(dist) >= window_min and dist >= 0))
Distance2_temp, Strand1_temp, Strand2_temp = zip(
*((dist, strand2, strand1) for dist, strand1, strand2 in zip(Distance_init, Strand1_init, Strand2_init) if
abs(dist) <= window_max and abs(dist) >= window_min and dist < 0 ))
Distance = list(Distance1_temp)+list(Distance2_temp)
Strand1 = list(Strand1)+list(Strand1_temp)
Strand2 = list(Strand2)+list(Strand2_temp)
p_p, m_m, p_m, m_p, same_strand, opposite_strand, convergent, divergent = orientation(Strand1, Strand2)
# Calculate the distance distributions for all orientations
Distances_orientations = get_distance_orientations(Distance, Strand1, Strand2, window_min, window_max)
p_pL_bin = []
m_mL_bin = [] # Same orientation
p_mL_bin = []
m_pL_bin = [] # Opposite orientation
same_strandL_bin = []
opposite_strandL_bin = [] # Combined same / opposite orientations
convergentL_bin = []
divergentL_bin = []
Bins = []
if bins is not None:
# Performs the same analysis for each bin.
Bins = binner(window_min, window_max, bins)
for index, bin_i in enumerate(Bins):
Strand1Bin = []
Strand2Bin = []
min_bin, max_bin = bin_i
for k in range(len(Distance)):
if Distance[k] >= min_bin and Distance[k] < max_bin:
Strand1Bin.append(Strand1[k])
Strand2Bin.append(Strand2[k])
p_p_bin, m_m_bin, p_m_bin, m_p_bin, same_strand_bin, opposite_strand_bin, convergent_bin, divergent_bin = orientation(
Strand1Bin, Strand2Bin)
p_pL_bin.append(p_p_bin)
m_mL_bin.append(m_m_bin) # Same orientation, per bin
p_mL_bin.append(p_m_bin)
m_pL_bin.append(m_p_bin) # Opposite orientation per bin
same_strandL_bin.append(same_strand_bin)
opposite_strandL_bin.append(opposite_strand_bin)
convergentL_bin.append(convergent_bin)
divergentL_bin.append(divergent_bin)
return (Distances_orientations, p_p, m_m, p_m, m_p, same_strand, opposite_strand, convergent, divergent), (
Bins, p_pL_bin, m_mL_bin, p_mL_bin, m_pL_bin, same_strandL_bin, opposite_strandL_bin, convergentL_bin,
divergentL_bin)
def get_distance_orientations(DistanceL, Strand1L, Strand2L, window_min, window_max):
same_strandL_distance = []
opposite_strandL_distance = []
divergentL_distance = []
convergentL_distance = []
for index in range(len(Strand1L)):
if (DistanceL[index] <= window_max and DistanceL[index] >= window_min):
sign1 = Strand1L[index]
sign2 = Strand2L[index]
if sign1 in ["+", "-"] and sign2 in ["+", "-"]:
if sign1 == sign2:
same_strandL_distance.append(DistanceL[index])
else:
opposite_strandL_distance.append(DistanceL[index])
if sign1 == "+" and sign2 == "-":
convergentL_distance.append(DistanceL[index])
elif sign1 == "-" and sign2 == "+":
divergentL_distance.append(DistanceL[index])
return (same_strandL_distance, opposite_strandL_distance, divergentL_distance, convergentL_distance)
def orientation(sign1L, sign2L):
"""
This function takes as input two lists of signs and calculates their relative position (same strand,
opposite strand, convergent and divergent)
    :param sign1L: list of strand signs ("+"/"-") for the first BED file
# demo_quantize_methods.py
import timeit
from collections import Counter
from typing import Callable, Tuple, Optional, Union, List, Type
from PIL import Image
import cv2
import numpy as np
import scipy.cluster
from sklearn.cluster import KMeans, MeanShift, MiniBatchKMeans
import sklearn.utils
# import sklearn.metrics
from pyclustering.cluster import (
bsas,
mbsas,
dbscan,
optics,
syncnet,
syncsom,
ttsas,
xmeans,
center_initializer,
elbow,
kmeans,
kmedians,
)
from pyclustering.utils import type_metric, distance_metric
MAX_SIZE = 500
def test_pillow(
img_input: Image.Image, method: int
) -> Tuple[Type[Image.Image], List[List[int]]]:
img: Image.Image = img_input.copy()
img.thumbnail((MAX_SIZE, MAX_SIZE), Image.NEAREST)
threshold_pixel_percentage: float = 0.05
nb_colours: int = 20
nb_colours_under_threshold: int
nb_pixels: int = img.width * img.height
quantized_img: Image.Image
while True:
        # method: 0 = median cut, 1 = maximum coverage, 2 = fast octree
quantized_img = img.quantize(colors=nb_colours, method=method, kmeans=0)
nb_colours_under_threshold = 0
colours_list: [Tuple[int, int]] = quantized_img.getcolors(nb_colours)
for (count, pixel) in colours_list:
if count / nb_pixels < threshold_pixel_percentage:
nb_colours_under_threshold += 1
if nb_colours_under_threshold == 0:
break
nb_colours -= -(-nb_colours_under_threshold // 2) # ceil integer division
palette: [int] = quantized_img.getpalette()
colours_list: [[int]] = [palette[i : i + 3] for i in range(0, nb_colours * 3, 3)]
return quantized_img, colours_list
def test_pillow_median_cut(
img_input: Image.Image
) -> Tuple[Type[Image.Image], List[List[int]]]:
return test_pillow(img_input, 0)
def test_pillow_maximum_coverage(
img_input: Image.Image
) -> Tuple[Type[Image.Image], List[List[int]]]:
return test_pillow(img_input, 1)
def test_pillow_fast_octree(
img_input: Image.Image
) -> Tuple[Type[Image.Image], List[List[int]]]:
return test_pillow(img_input, 2)
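# Hedged usage sketch for the Pillow-based quantizers (file path hypothetical):
#   img = Image.open("photo.jpg").convert("RGB")
#   quantized_img, colours = test_pillow_median_cut(img)
#   # `colours` is a list of [R, G, B] triples from the reduced palette.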
def get_img_data(
img_input: Image.Image,
mini: bool = False,
conversion_method: int = cv2.COLOR_RGB2BGR,
) -> Tuple[np.ndarray, int, np.ndarray]:
img: np.ndarray = cv2.cvtColor(np.array(img_input), conversion_method)
ratio: float = min(
MAX_SIZE / img.shape[0], MAX_SIZE / img.shape[1]
) # calculate ratio
if mini:
ratio /= 6
img = cv2.resize(img, None, fx=ratio, fy=ratio, interpolation=cv2.INTER_AREA)
    nb_pixels: int = img.shape[0] * img.shape[1]  # pixel count; img.size would also count channels
flat_img: np.ndarray = img.reshape((-1, 3))
flat_img: np.ndarray = np.float32(flat_img)
return img, nb_pixels, flat_img
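# Shape sketch: for an RGB input, get_img_data returns the converted and
# resized image, its pixel count, and a float32 array of shape (h*w, 3)
# that the clustering routines below consume.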
def process_result(
center: np.ndarray,
label: np.ndarray,
shape: Tuple[int, int, int],
conversion_method: int = cv2.COLOR_BGR2RGB,
) -> Tuple[Type[Image.Image], np.ndarray]:
center: np.ndarray = np.uint8(center)
quantized_img: np.ndarray = center[label]
quantized_img = quantized_img.reshape(shape)
quantized_img = cv2.cvtColor(quantized_img, conversion_method)
center = cv2.cvtColor(np.expand_dims(center, axis=0), conversion_method)[0]
return Image.fromarray(quantized_img), center
def update_nb_colours(
label: np.ndarray,
nb_pixels: int,
threshold_pixel_percentage: float,
nb_colours: int, # , flat_img: np.ndarray
) -> Tuple[int, int]:
nb_colours_under_threshold: int = 0
label = label.flatten()
colour_count: Counter[int] = Counter(label)
for (pixel, count) in colour_count.items():
if count / nb_pixels < threshold_pixel_percentage:
nb_colours_under_threshold += 1
# silhouette = sklearn.metrics.silhouette_score(flat_img, label, metric='euclidean', sample_size=1000)
# print(f'nb_colours = {nb_colours}, silhouette_score = {silhouette}')
nb_colours -= -(-nb_colours_under_threshold // 2) # ceil integer division
return nb_colours, nb_colours_under_threshold
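def _demo_update_nb_colours():
    # Minimal sanity sketch (not part of the original module): colour 2
    # covers 1 of 10 pixels (10% < 20% threshold), so the budget of 3
    # colours drops by ceil(1/2) = 1.
    label = np.array([0] * 5 + [1] * 4 + [2])
    nb_colours, under = update_nb_colours(label, 10, 0.2, 3)
    assert (nb_colours, under) == (2, 1)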
def test_opencv(
img_input: Image.Image, method1: int, method2: int
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input, False, method1)
threshold_pixel_percentage: float = 0.01
nb_colours: int = 20
nb_colours_under_threshold: int = nb_colours
criteria: Tuple[int, int, float] = (
cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER,
10,
1.0,
)
center: Optional[np.ndarray] = None
label: Optional[np.ndarray] = None
while nb_colours_under_threshold > 0:
ret: float
ret, label, center = cv2.kmeans(
flat_img, nb_colours, None, criteria, 10, cv2.KMEANS_PP_CENTERS
)
nb_colours, nb_colours_under_threshold = update_nb_colours(
label, nb_pixels, threshold_pixel_percentage, nb_colours # , flat_img
)
return process_result(center, label, img.shape, method2)
def test_opencv_rgb(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
return test_opencv(img_input, cv2.COLOR_RGB2BGR, cv2.COLOR_BGR2RGB)
def test_opencv_hsv(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
return test_opencv(img_input, cv2.COLOR_RGB2HSV, cv2.COLOR_HSV2RGB)
def test_opencv_lab(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
return test_opencv(img_input, cv2.COLOR_RGB2Lab, cv2.COLOR_Lab2RGB)
def test_scipy(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# minimum share of the image each colour must cover; lower it to keep more colours
threshold_pixel_percentage: float = 0.02
nb_colours: int = 20
nb_colours_under_threshold: int = nb_colours
centroids: Optional[np.ndarray] = None
qnt: Optional[np.ndarray] = None
while nb_colours_under_threshold > 0:
# performing the clustering
centroids, _ = scipy.cluster.vq.kmeans(flat_img, nb_colours)
# quantization
qnt, _ = scipy.cluster.vq.vq(flat_img, centroids)
nb_colours, nb_colours_under_threshold = update_nb_colours(
qnt, nb_pixels, threshold_pixel_percentage, nb_colours # , flat_img
)
# reshaping the result of the quantization
centers_idx: np.ndarray = np.reshape(qnt, (img.shape[0], img.shape[1]))
return process_result(centroids, centers_idx, img.shape)
def test_scipy2(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# minimum share of the image each colour must cover; lower it to keep more colours
threshold_pixel_percentage: float = 0.02
nb_colours: int = 20
nb_colours_under_threshold: int = nb_colours
centroids: Optional[np.ndarray] = None
qnt: Optional[np.ndarray] = None
flat_img_sample: np.ndarray = sklearn.utils.shuffle(flat_img, random_state=0)[:1000]
while nb_colours_under_threshold > 0:
# performing the clustering
centroids, _ = scipy.cluster.vq.kmeans(flat_img_sample, nb_colours)
# quantization
qnt, _ = scipy.cluster.vq.vq(flat_img, centroids)
nb_colours, nb_colours_under_threshold = update_nb_colours(
qnt, nb_pixels, threshold_pixel_percentage, nb_colours # , flat_img
)
# reshaping the result of the quantization
centers_idx: np.ndarray = np.reshape(qnt, (img.shape[0], img.shape[1]))
return process_result(centroids, centers_idx, img.shape)
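# The fit-on-a-sample / assign-everything pattern used by test_scipy2, in
# isolation (a sketch with made-up array sizes): k-means runs on a 1000-point
# shuffle sample, then vq() labels every pixel against those centroids.
def _demo_sample_then_assign() -> None:
    rng = np.random.default_rng(0)
    pixels = rng.random((5000, 3)).astype(np.float32)
    sample = sklearn.utils.shuffle(pixels, random_state=0)[:1000]
    centroids, _ = scipy.cluster.vq.kmeans(sample, 4)
    labels, _ = scipy.cluster.vq.vq(pixels, centroids)
    assert labels.shape == (5000,)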
def test_sklearn_kmeans(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# minimum share of the image each colour must cover; lower it to keep more colours
threshold_pixel_percentage: float = 0.02
nb_colours: int = 20
nb_colours_under_threshold: int = nb_colours
center: Optional[np.ndarray] = None
label: Optional[np.ndarray] = None
while nb_colours_under_threshold > 0:
kmeans_instance: KMeans = KMeans(n_clusters=nb_colours, random_state=0).fit(
flat_img
)
label = kmeans_instance.labels_
center = kmeans_instance.cluster_centers_
nb_colours, nb_colours_under_threshold = update_nb_colours(
label, nb_pixels, threshold_pixel_percentage, nb_colours # , flat_img
)
return process_result(center, label, img.shape)
def test_sklearn_iter(
img_input: Image.Image, constructor: Callable
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# minimum share of the image each colour must cover; lower it to keep more colours
threshold_pixel_percentage: float = 0.02
nb_colours: int = 20
nb_colours_under_threshold: int = nb_colours
center: Optional[np.ndarray] = None
label: Optional[np.ndarray] = None
flat_img_sample: np.ndarray = sklearn.utils.shuffle(flat_img, random_state=0)[:1000]
while nb_colours_under_threshold > 0:
kmeans_instance: Union[KMeans, MiniBatchKMeans] = constructor(
n_clusters=nb_colours, random_state=42
).fit(flat_img_sample)
center = kmeans_instance.cluster_centers_
label = kmeans_instance.predict(flat_img)
nb_colours, nb_colours_under_threshold = update_nb_colours(
label, nb_pixels, threshold_pixel_percentage, nb_colours # , flat_img
)
return process_result(center, label, img.shape)
def test_sklearn_kmeans2(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_sklearn_iter(img_input, KMeans)
def test_sklearn_mini_batch_kmeans(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_sklearn_iter(img_input, MiniBatchKMeans)
def test_sklearn_mean_shift(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
center: np.ndarray
label: np.ndarray
flat_img_sample: np.ndarray = sklearn.utils.shuffle(flat_img, random_state=0)[:1000]
clusterer_instance: MeanShift = MeanShift().fit(flat_img_sample)
center = clusterer_instance.cluster_centers_
label = clusterer_instance.predict(flat_img)
return process_result(center, label, img.shape)
def process_pycluster_result(
flat_img: np.ndarray,
clusters: List[List[int]],
representatives: List[List[float]],
shape: Tuple[int, int, int],
conversion_method: int = cv2.COLOR_BGR2RGB,
) -> Tuple[Type[Image.Image], np.ndarray]:
representatives: np.ndarray = np.uint8(representatives)
for index_cluster, cluster in enumerate(clusters):
for pixel in cluster:
flat_img[pixel] = representatives[index_cluster]
quantized_img: np.ndarray = np.uint8(flat_img.reshape(shape))
quantized_img = cv2.cvtColor(quantized_img, conversion_method)
representatives = cv2.cvtColor(
np.expand_dims(representatives, axis=0), conversion_method
)[0]
return Image.fromarray(quantized_img), representatives
def test_pycluster_threshold(
img_input: Image.Image, func: Callable
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# Prepare algorithm's parameters.
max_clusters: int = 20
threshold: float = 15
# this function gave me the best result with the least colours
clusterer: bsas.bsas = func(
flat_img,
max_clusters,
threshold,
metric=distance_metric(type_metric.CHI_SQUARE),
)
clusterer.process()
clusters: List[List[int]] = clusterer.get_clusters()
representatives: List[List[float]] = clusterer.get_representatives()
return process_pycluster_result(flat_img, clusters, representatives, img.shape)
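# Note (illustrative): distance_metric/type_metric come from pyclustering.utils,
# imported at the top of this file. Swapping CHI_SQUARE for another built-in,
# e.g. distance_metric(type_metric.EUCLIDEAN_SQUARE), changes how BSAS/MBSAS
# measure colour similarity and therefore how aggressively clusters merge.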
def test_pycluster_bsas(img_input: Image.Image) -> Tuple[Type[Image.Image], np.ndarray]:
return test_pycluster_threshold(img_input, bsas.bsas)
def test_pycluster_mbsas(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_pycluster_threshold(img_input, mbsas.mbsas)
def test_pycluster_neighbours(
img_input: Image.Image, func: Callable
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# Prepare algorithm's parameters.
eps: float = 0.7
neighbors: int = len(flat_img) // 1000
clusterer: Union[dbscan.dbscan, optics.optics] = func(flat_img, eps, neighbors)
clusterer.process()
clusters: List[List[int]] = clusterer.get_clusters()
representatives: np.ndarray = np.asarray(
[
np.mean([flat_img[pixel] for pixel in cluster], axis=0)
for cluster in clusters
]
)
return process_pycluster_result(flat_img, clusters, representatives, img.shape)
def test_pycluster_dbscan(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_pycluster_neighbours(img_input, dbscan.dbscan)
def test_pycluster_optics(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_pycluster_neighbours(img_input, optics.optics)
def test_pycluster_syncnet(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input, True)
# Prepare algorithm's parameters.
radius: float = 50
network: syncnet.syncnet = syncnet.syncnet(flat_img, radius)
clusterer: syncnet.syncnet_analyser = network.process()
clusters: List[List[int]] = clusterer.allocate_clusters()
representatives: np.ndarray = np.asarray(
[
np.mean([flat_img[pixel] for pixel in cluster], axis=0)
for cluster in clusters
]
)
return process_pycluster_result(flat_img, clusters, representatives, img.shape)
def test_pycluster_syncsom(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input, True)
# Prepare algorithm's parameters.
radius: float = 0.001
rows: int = 2
cols: int = 2
clusterer: syncsom.syncsom = syncsom.syncsom(flat_img, rows, cols, radius)
clusterer.process()
clusters: List[List[int]] = clusterer.get_clusters()
representatives: np.ndarray = np.asarray(
[
np.mean([flat_img[pixel] for pixel in cluster], axis=0)
for cluster in clusters
]
)
return process_pycluster_result(flat_img, clusters, representatives, img.shape)
def test_pycluster_2threshold(
img_input: Image.Image, func: Callable
) -> Tuple[Type[Image.Image], np.ndarray]:
img, nb_pixels, flat_img = get_img_data(img_input)
# Prepare algorithm's parameters.
threshold1: float = 70
threshold2: float = 120
# Manhattan, although not particularly good for colour distance, gave me the best results
clusterer: ttsas.ttsas = func(
flat_img, threshold1, threshold2, metric=distance_metric(type_metric.MANHATTAN)
)
clusterer.process()
clusters: List[List[int]] = clusterer.get_clusters()
representatives: List[List[float]] = clusterer.get_representatives()
return process_pycluster_result(flat_img, clusters, representatives, img.shape)
def test_pycluster_ttsas(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
return test_pycluster_2threshold(img_input, ttsas.ttsas)
def test_pycluster_xmeans(
img_input: Image.Image
) -> Tuple[Type[Image.Image], np.ndarray]:
"""
Original source: https://github.com/dmyersturnbull/tyrannosaurus
Copyright 2020–2021 <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0
Command-line interface.
"""
from __future__ import annotations
import logging
import os
from pathlib import Path
from dataclasses import dataclass
from subprocess import check_call # nosec
from typing import Optional, Sequence
import typer
from tyrannosaurus.clean import Clean
from tyrannosaurus.recipes import Recipe
from tyrannosaurus.envs import CondaEnv
from tyrannosaurus.context import Context
from tyrannosaurus.enums import DevStatus, License
from tyrannosaurus.helpers import _Env
from tyrannosaurus.new import New
from tyrannosaurus.sync import Sync
from tyrannosaurus.update import Update
logger = logging.getLogger(__package__)
class _DevNull: # pragma: no cover
"""Pretends to write but doesn't."""
def write(self, msg):
pass
def flush(self):
pass
def close(self):
pass
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
class Msg:
@classmethod
def success(cls, msg: str) -> None:
msg = typer.style(msg, fg=typer.colors.BLUE, bold=True)
typer.echo(msg)
@classmethod
def info(cls, msg: str) -> None:
typer.echo(msg)
@classmethod
def failure(cls, msg: str) -> None:
msg = typer.style(msg, fg=typer.colors.RED, bold=True)
typer.echo(msg)
@classmethod
def write_info(cls):
# avoid importing above, just in case a user runs --version, --info, or info on an improperly installed version
from tyrannosaurus import __date__, __version__
Msg.info(f"Tyrannosaurus v{__version__} ({__date__})")
@dataclass(frozen=True, repr=True)
class CliState:
dry_run: bool = False
verbose: bool = False
def __post_init__(self):
if self.verbose:
logger.setLevel(logging.DEBUG)
def tyranno_main(
version: bool = False,
info: bool = False,
):
"""
Tyrannosaurus.
Tyrannosaurus can create new modern Python projects from a template
and synchronize metadata across the project.
Args:
version: Write version and exit
info: Write info and exit (same as 'tyrannosaurus info')
"""
if version or info:
Msg.write_info()
raise typer.Exit()
cli = typer.Typer(callback=tyranno_main, add_completion=True)
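# Hedged usage note: with the commands registered below, this Typer app is
# driven as, e.g., `tyrannosaurus new myproject --license apache2` or
# `tyrannosaurus sync --dry-run` (assuming the console entry point is named
# `tyrannosaurus`).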
class CliCommands:
"""
Commands for Tyrannosaurus.
"""
_APACHE2 = typer.Option(License.apache2)
_ENV_YAML = Path("environment.yml")
@staticmethod
@cli.command()
def new(
name: str,
license: str = _APACHE2,
user: Optional[str] = None,
authors: Optional[str] = None,
description: str = "A Python project",
keywords: str = "",
version: str = "0.1.0",
status: Optional[DevStatus] = None,
track: bool = False,
tyranno: str = "current",
prompt: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Creates a new project.
Args:
name: The name of the project, including any dashes or capital letters
license: The name of the license. One of: apache2, cc0, ccby, ccbync, gpl3, lgpl3, mit
user: Github repository user or org name
authors: List of author names, comma-separated
description: A <100 char description for the project
keywords: A list of <= 5 keywords, comma-separated
version: A semantic version (for your project)
status: A PyPi classifier for development status;
if None, defaults to "alpha" if version<1.0 else "production"
track: Track a remote repo (should be an empty repo; otherwise there will be a merge conflict)
tyranno: Version of tyrannosaurus to use as the template; can be:
an exact version number,
'current' for the currently installed version,
'stable' for the latest stable version,
or 'latest' for the bleeding-edge version
prompt: Prompt for info
verbose: Output more information
"""
state = CliState(verbose=verbose)
if version.startswith("v"):
version = version[1:]
if status is None:
status = DevStatus.guess_from_version(version)
if prompt:
name = typer.prompt("name", type=str, default=name)
description = typer.prompt("description", type=str, default="A new project")
version = typer.prompt("version", type=str, default="0.1.0")
if version.startswith("v"):
version = version[1:]
if status is None:
status = DevStatus.guess_from_version(version)
status = typer.prompt("status", type=DevStatus, default=status)
license = typer.prompt("license", type=License, default="apache2").lower()
user = typer.prompt(
"user", type=str, prompt_suffix=" [default: from 'git config']", default=user
)
authors = typer.prompt(
"authors",
type=str,
prompt_suffix=" [comma-separated; default: from 'git config']",
default=authors,
)
description = typer.prompt("description", type=str, default=description)
keywords = typer.prompt(
"keywords", type=str, prompt_suffix=" [comma-separated]", default=keywords
)
track = typer.prompt("track", type=bool, default=track)
tyranno = typer.prompt(
"tyranno",
type=str,
prompt_suffix=" ['current', 'stable', 'latest', or a version]",
default=tyranno,
)
e = _Env(user=user, authors=authors)
keywords = keywords.split(",")
path = Path(name)
New(
name,
license_name=license,
username=e.user,
authors=e.authors,
description=description,
keywords=keywords,
version=version,
status=status,
should_track=track,
tyranno_vr=tyranno.strip(" \r\n\t"),
debug=state.verbose,
).create(path)
Msg.success(f"Done! Created a new repository under {name}")
Msg.success(
"See https://tyrannosaurus.readthedocs.io/en/latest/guide.html#to-do-list-for-new-projects"
)
if track:
repo_to_track = f"https://github.com/{e.user}/{name.lower()}.git"
Msg.info(f"Tracking {repo_to_track}")
Msg.info(f"Checked out branch main tracking origin/main")
@staticmethod
@cli.command()
def sync(
dry_run: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Syncs project metadata between configured files.
Args:
dry_run: Don't write; just output what it would do
verbose: Output more information
"""
state = CliState(dry_run=dry_run, verbose=verbose)
context = Context(Path(os.getcwd()), dry_run=state.dry_run)
Msg.info("Syncing metadata...")
Msg.info("Currently, only targets 'init' and 'recipe' are implemented.")
targets = Sync(context).sync()
Msg.success(f"Done. Synced to {len(targets)} targets: {targets}")
@staticmethod
@cli.command()
def env(
path: Path = _ENV_YAML,
name: Optional[str] = None,
dev: bool = False,
extras: bool = False,
dry_run: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Generates an Anaconda environment file.
Args:
path: Write to this path
name: The name of the environment; defaults to the project name
dev: Include development/build dependencies
extras: Include optional dependencies
dry_run: Don't write; just output what it would do
verbose: Output more information
"""
state = CliState(dry_run=dry_run, verbose=verbose)
typer.echo("Writing environment file...")
context = Context(Path(os.getcwd()), dry_run=state.dry_run)
if name is None:
name = context.project
CondaEnv(name, dev=dev, extras=extras).create(context, path)
Msg.success(f"Wrote environment file {path}")
@staticmethod
@cli.command()
def recipe(
dry_run: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Generates a Conda recipe using grayskull.
Args:
dry_run: Don't write; just output what it would do
verbose: Output more information
"""
state = CliState(dry_run=dry_run, verbose=verbose)
dry_run = state.dry_run
context = Context(Path(os.getcwd()), dry_run=dry_run)
output_path = context.path / "recipes"
Recipe(context).create(output_path)
Msg.success(f"Generated a new recipe under {output_path}")
@staticmethod
@cli.command()
def update(
auto_fix: bool = typer.Option(False, hidden=True),
verbose: bool = False,
) -> None: # pragma: no cover
"""
Finds and lists dependencies that could be updated.
Args:
auto_fix: Update dependencies in place (not supported yet)
verbose: Output more information
"""
state = CliState(verbose=verbose)
context = Context(Path(os.getcwd()), dry_run=not auto_fix)
updates, dev_updates = Update(context).update()
Msg.info("Main updates:")
for pkg, (old, up) in updates.items():
Msg.info(f" {pkg}: {old} --> {up}")
Msg.info("Dev updates:")
for pkg, (old, up) in dev_updates.items():
Msg.info(f" {pkg}: {old} --> {up}")
if not state.dry_run:
Msg.failure("Auto-fixing is not supported yet!")
@staticmethod
@cli.command()
def clean(
dists: bool = False,
aggressive: bool = False,
hard_delete: bool = False,
dry_run: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Removes unwanted files.
Deletes the contents of ``.tyrannosaurus``.
Then trashes temporary and unwanted files and directories to a tree under ``.tyrannosaurus``.
Args:
dists: Remove dists
aggressive: Delete additional files, including .swp and .ipython_checkpoints
hard_delete: If true, call shutil.rmtree instead of moving to .tyrannosaurus
dry_run: Don't write; just output what it would do
verbose: Output more information
"""
state = CliState(verbose=verbose, dry_run=dry_run)
dry_run = state.dry_run
trashed = Clean(dists, aggressive, hard_delete, dry_run).clean(Path(os.getcwd()))
Msg.info(f"Trashed {len(trashed)} paths.")
@staticmethod
@cli.command()
def info() -> None: # pragma: no cover
"""
Prints Tyrannosaurus info.
"""
Msg.write_info()
@staticmethod
@cli.command()
def build(
bare: bool = False,
dry_run: bool = False,
verbose: bool = False,
) -> None: # pragma: no cover
"""
Syncs, builds, and tests your project.
If ``bare`` is NOT set, runs:
- tyrannosaurus sync
- poetry lock
- tox
- tyrannosaurus clean
----------------------------------------
If the ``bare`` IS set:
Runs the commands without tox and without creating a new virtualenv.
This can be useful if you're using Conda and have a dependency only available through Anaconda.
It's also often faster.
This command is for convenience and isn't very customizable.
In this case, runs:
- tyrannosaurus sync
- poetry lock
- pre-commit run check-toml
- pre-commit run check-yaml
- pre-commit run check-json
- poetry check
- poetry build
- poetry install -v
- poetry run pytest --cov
- poetry run flake8 tyrannosaurus
- poetry run flake8 docs
- poetry run flake8 --ignore=D100,D101,D102,D103,D104,S101 tests
- sphinx-build -b html docs docs/html
- tyrannosaurus clean
- pip install .
----------------------------------------
Args:
bare: Do not use tox or virtualenv. See above.
dry_run: Just output the commands to stdout (don't run them). Useful for making a script template.
verbose: Output more information
"""
state = CliState(dry_run=dry_run, verbose=verbose)
CliCommands.build_internal(bare=bare, dry=state.dry_run)
@staticmethod
def build_internal(bare: bool = False, dry: bool = False) -> Sequence[str]:
split
<gh_stars>1-10
###
# Copyright (c) 2002-2005 <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
import copy
import time
import random
import supybot.log as log
import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
import supybot.ircdb as ircdb
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
from utils.str import rsplit
from utils.iter import imap, chain, cycle
from utils.structures import queue, smallqueue, RingBuffer
###
# The base class for a callback to be registered with an Irc object. Shows
# the required interface for callbacks -- name(),
# inFilter(irc, msg), outFilter(irc, msg), and __call__(irc, msg) [used so as
# to make functions used as callbacks conceivable, and so if refactoring ever
# changes the nature of the callbacks from classes to functions, syntactical
# changes elsewhere won't be required.]
###
class IrcCommandDispatcher(object):
"""Base class for classes that must dispatch on a command."""
def dispatchCommand(self, command):
"""Given a string 'command', dispatches to doCommand."""
return getattr(self, 'do' + command.capitalize(), None)
class IrcCallback(IrcCommandDispatcher):
"""Base class for standard callbacks.
Callbacks derived from this class should have methods of the form
"doCommand" -- doPrivmsg, doNick, do433, etc. These will be called
on matching messages.
"""
callAfter = ()
callBefore = ()
__metaclass__ = log.MetaFirewall
__firewalled__ = {'die': None,
'reset': None,
'__call__': None,
'inFilter': lambda self, irc, msg: msg,
'outFilter': lambda self, irc, msg: msg,
'name': lambda self: self.__class__.__name__,
'callPrecedence': lambda self, irc: ([], []),
}
def __init__(self, *args, **kwargs):
#object doesn't take any args, so the buck stops here.
#super(IrcCallback, self).__init__(*args, **kwargs)
pass
def __repr__(self):
return '<%s %s %s>' % \
(self.__class__.__name__, self.name(), object.__repr__(self))
def name(self):
"""Returns the name of the callback."""
return self.__class__.__name__
def callPrecedence(self, irc):
"""Returns a pair of (callbacks to call before me,
callbacks to call after me)"""
after = []
before = []
for name in self.callBefore:
cb = irc.getCallback(name)
if cb is not None:
after.append(cb)
for name in self.callAfter:
cb = irc.getCallback(name)
if cb is not None:
before.append(cb)
assert self not in after, '%s was in its own after.' % self.name()
assert self not in before, '%s was in its own before.' % self.name()
return (before, after)
def inFilter(self, irc, msg):
"""Used for filtering/modifying messages as they're entering.
ircmsgs.IrcMsg objects are immutable, so this method is expected to
return another ircmsgs.IrcMsg object. Obviously the same IrcMsg
can be returned.
"""
return msg
def outFilter(self, irc, msg):
"""Used for filtering/modifying messages as they're leaving.
As with inFilter, an IrcMsg is returned.
"""
return msg
def __call__(self, irc, msg):
"""Used for handling each message."""
method = self.dispatchCommand(msg.command)
if method is not None:
method(irc, msg)
def reset(self):
"""Resets the callback. Called when reconnecting to the server."""
pass
def die(self):
"""Makes the callback die. Called when the parent Irc object dies."""
pass
###
# Basic queue for IRC messages. It doesn't presently (but should at some
# later point) reorder messages based on priority or penalty calculations.
###
_high = frozenset(['MODE', 'KICK', 'PONG', 'NICK', 'PASS', 'CAPAB'])
_low = frozenset(['PRIVMSG', 'PING', 'WHO', 'NOTICE', 'JOIN'])
class IrcMsgQueue(object):
"""Class for a queue of IrcMsgs. Eventually, it should be smart.
Probably smarter than it is now, though it's gotten quite a bit smarter
than it originally was. A method to "score" methods, and a heapq to
maintain a priority queue of the messages would be the ideal way to do
intelligent queuing.
As it stands, however, we simply keep track of 'high priority' messages,
'low priority' messages, and normal messages, and just make sure to return
the 'high priority' ones before the normal ones before the 'low priority'
ones.
"""
__slots__ = ('msgs', 'highpriority', 'normal', 'lowpriority', 'lastJoin')
def __init__(self, iterable=()):
self.reset()
for msg in iterable:
self.enqueue(msg)
def reset(self):
"""Clears the queue."""
self.lastJoin = 0
self.highpriority = smallqueue()
self.normal = smallqueue()
self.lowpriority = smallqueue()
def enqueue(self, msg):
"""Enqueues a given message."""
if msg in self and \
conf.supybot.protocols.irc.queuing.duplicates():
s = str(msg).strip()
log.info('Not adding message %q to queue, already added.', s)
return False
else:
if msg.command in _high:
self.highpriority.enqueue(msg)
elif msg.command in _low:
self.lowpriority.enqueue(msg)
else:
self.normal.enqueue(msg)
return True
def dequeue(self):
"""Dequeues a given message."""
msg = None
if self.highpriority:
msg = self.highpriority.dequeue()
elif self.normal:
msg = self.normal.dequeue()
elif self.lowpriority:
msg = self.lowpriority.dequeue()
if msg is not None and msg.command == 'JOIN':
limit = conf.supybot.protocols.irc.queuing.rateLimit.join()
now = time.time()
if self.lastJoin + limit <= now:
self.lastJoin = now
else:
self.lowpriority.enqueue(msg)
msg = None
return msg
def __contains__(self, msg):
return msg in self.normal or \
msg in self.lowpriority or \
msg in self.highpriority
def __nonzero__(self):
return bool(self.highpriority or self.normal or self.lowpriority)
def __len__(self):
return len(self.highpriority)+len(self.lowpriority)+len(self.normal)
def __repr__(self):
name = self.__class__.__name__
return '%s(%r)' % (name, list(chain(self.highpriority,
self.normal,
self.lowpriority)))
__str__ = __repr__
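# Hedged sketch of the priority behaviour (the demo helper is mine; it relies
# on supybot's ircmsgs.privmsg/ircmsgs.nick constructors, which this module
# already imports): NICK sits in _high and PRIVMSG in _low, so the NICK
# message comes out first even though it was enqueued second.
def _demo_queue_priority():
    q = IrcMsgQueue()
    q.enqueue(ircmsgs.privmsg('#chan', 'hello'))
    q.enqueue(ircmsgs.nick('newnick'))
    assert q.dequeue().command == 'NICK'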
###
# Maintains the state of IRC connection -- the most recent messages, the
# status of various modes (especially ops/halfops/voices) in channels, etc.
###
class ChannelState(utils.python.Object):
__slots__ = ('users', 'ops', 'halfops', 'bans',
'voices', 'topic', 'modes', 'created')
def __init__(self):
self.topic = ''
self.created = 0
self.ops = ircutils.IrcSet()
self.bans = ircutils.IrcSet()
self.users = ircutils.IrcSet()
self.voices = ircutils.IrcSet()
self.halfops = ircutils.IrcSet()
self.modes = ircutils.IrcDict()
def isOp(self, nick):
return nick in self.ops
def isVoice(self, nick):
return nick in self.voices
def isHalfop(self, nick):
return nick in self.halfops
def addUser(self, user):
"Adds a given user to the ChannelState. Power prefixes are handled."
nick = user.lstrip('@%+&~!')
if not nick:
return
# & is used to denote protected users in UnrealIRCd
# ~ is used to denote channel owner in UnrealIRCd
# ! is used to denote protected users in UltimateIRCd
while user and user[0] in '@%+&~!':
(marker, user) = (user[0], user[1:])
assert user, 'Looks like my caller is passing chars, not nicks.'
if marker in '@&~!':
self.ops.add(nick)
elif marker == '%':
self.halfops.add(nick)
elif marker == '+':
self.voices.add(nick)
self.users.add(nick)
def replaceUser(self, oldNick, newNick):
"""Changes the user oldNick to newNick; used for NICK changes."""
# Note that this doesn't have to have the sigil (@%+) that users
# have to have for addUser; it just changes the name of the user
# without changing any of his categories.
for s in (self.users, self.ops, self.halfops, self.voices):
if oldNick in s:
s.remove(oldNick)
s.add(newNick)
def removeUser(self, user):
"""Removes a given user from the channel."""
self.users.discard(user)
self.ops.discard(user)
self.halfops.discard(user)
self.voices.discard(user)
def setMode(self, mode, value=None):
assert mode not in 'ovhbeq'
self.modes[mode] = value
def unsetMode(self, mode):
assert mode not in 'ovhbeq'
if mode in self.modes:
del self.modes[mode]
def doMode(self, msg):
def getSet(c):
if c == 'o':
Set = self.ops
elif c == 'v':
Set = self.voices
elif c == 'h':
Set = self.halfops
elif c == 'b':
Set = self.bans
else: # We don't care yet, so we'll just return an empty set.
Set = set()
return Set
for (mode, value) in ircutils.separateModes(msg.args[1:]):
(action, modeChar) = mode
if modeChar in 'ovhbeq': # We don't handle e or q yet.
Set | |
IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet)
print("\n" * 1)
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet+ 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print("\n" * 1)
elif cari_netmask == 28:
x = 4
y = 4
jumlah_subnet = 2 ** x
total_host = 2 ** y
host_per_subnet = 2 ** y - 2
blok_subnet = 256 - angka_x_pertama_oktet_terakhir - angka_x_kedua_oktet_terakhir - angka_x_ketiga_oktet_terakhir - angka_x_keempat_oktet_terakhir
ip_network = oktet_4_ipaddress - oktet_4_ipaddress
ip_broadcast = 255 - angka_x_pertama_oktet_terakhir - angka_x_kedua_oktet_terakhir - angka_x_ketiga_oktet_terakhir - angka_x_keempat_oktet_terakhir
rumus = "Banyaknya x = 2"
netmask = angka_x_pertama_oktet_terakhir + angka_x_kedua_oktet_terakhir + angka_x_ketiga_oktet_terakhir + angka_x_keempat_oktet_terakhir
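# Sanity check of the /28 arithmetic above (illustrative): /28 leaves 4 host
# bits, so 2**4 = 16 subnets of 16 addresses each; with the four angka_x_*
# octet values summing to 240, blok_subnet = 256 - 240 = 16.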
print(Fore.LIGHTGREEN_EX + " IP Address" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, oktet_4_ipaddress, "/28")
print(
Fore.LIGHTGREEN_EX + " Binary" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE +
" 11111111.11111111.11111111.11110000")
print(
Fore.LIGHTGREEN_EX + " Netmask" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE +
" 255 255 255",
netmask)
print("\n")
print(Fore.LIGHTGREEN_EX + " Jumlah Subnet" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", jumlah_subnet,
"[0,16,32,48,64,80,96,112,128,144,160,176,192,208,224,240]")
print(Fore.LIGHTGREEN_EX + " Jumlah Total Host" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", total_host)
print(Fore.LIGHTGREEN_EX + " Jumlah Host Per Subnet" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", host_per_subnet)
print(Fore.LIGHTGREEN_EX + " Blok Subnet" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet)
print("\n")
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + 1)
print(Fore.LIGHTGREEN_EX + " IP Terakhir" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet - 1)
print(Fore.LIGHTGREEN_EX + " IP Broadcast" + Fore.LIGHTBLUE_EX + " >>>"+Fore.WHITE+" ", oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_broadcast + blok_subnet + blok_subnet + blok_subnet)
print("\n" * 1)
print(Fore.LIGHTGREEN_EX + " IP Network" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet)
print(Fore.LIGHTGREEN_EX + " IP Pertama" + Fore.LIGHTBLUE_EX + " >>>" + Fore.WHITE + " ",
oktet_1_ipaddress,
oktet_2_ipaddress, oktet_3_ipaddress, ip_network + blok_subnet + blok_subnet + blok_subnet + blok_subnet+ 1)
print(Fore.LIGHTGREEN_EX | |
<reponame>SorooshMani-NOAA/pyschism
from abc import ABC
from collections import defaultdict
from functools import lru_cache
import hashlib
import logging
from itertools import permutations
import os
import pathlib
import tempfile
from typing import Union, Sequence, Hashable, List, Dict
import geopandas as gpd
from matplotlib.collections import PolyCollection
from matplotlib.path import Path
from matplotlib.tri import Triangulation
from matplotlib.transforms import Bbox
import numpy as np
from pyproj import Transformer, CRS
import requests
from shapely import ops
from shapely.geometry import (
box,
LinearRing,
LineString,
MultiPolygon,
Polygon,
Point,
)
from pyschism.mesh.parsers import grd, sms2dm
from pyschism.figures import figure
logger = logging.getLogger(__name__)
class Nodes:
def __init__(self, nodes: Dict[Hashable, List[List]], crs=None):
"""Setter for the nodes attribute.
Argument nodes must be of the form:
{id: [(x0, y0), z0]}
or
{id: [(x0, y0), [z0, ..., zn]]}
Gr3 format is assumed to be exclusively a 2D format that can hold
triangles or quads.
"""
for coords, _ in nodes.values():
if len(coords) != 2:
raise ValueError(
"Coordinate vertices for a gr3 type must be 2D, but got "
f"coordinates {coords}."
)
self._id = list(nodes.keys())
self._coords = np.array([coords for coords, _ in nodes.values()])
self._crs = CRS.from_user_input(crs) if crs is not None else crs
self._values = np.array([value for _, value in nodes.values()])
def transform_to(self, dst_crs):
dst_crs = CRS.from_user_input(dst_crs)
if not self.crs.equals(dst_crs):
self._coords = self.get_xy(dst_crs)
self._crs = dst_crs
if hasattr(self, "_gdf"):
del self._gdf
def transform_to_cpp(self, lonc, latc):
longitude = list(self.coord[:, 0]/180*np.pi)
latitude = list(self.coord[:, 1]/180*np.pi)
radius = 6378206.4
loncc = lonc / 180 * np.pi
latcc = latc / 180 * np.pi
x = [
radius * (longitude[i] - loncc) * np.cos(latcc)
for i in np.arange(len(longitude))
]
y = [radius * latitude[i] for i in np.arange(len(latitude))]
self._coords = np.vstack([x, y]).T
self._crs = None
return x, y
def get_xy(self, crs: Union[CRS, str] = None):
if crs is not None:
crs = CRS.from_user_input(crs)
if not crs.equals(self.crs):
transformer = Transformer.from_crs(self.crs, crs, always_xy=True)
x, y = transformer.transform(self.coord[:, 0], self.coord[:, 1])
return np.vstack([x, y]).T
return self.coord
@property
def gdf(self):
if not hasattr(self, "_gdf"):
data = []
for id, coord, values in zip(self._id, self._coords, self.values):
data.append({"geometry": Point(coord), "id": id, "values": values})
self._gdf = gpd.GeoDataFrame(data, crs=self.crs)
return self._gdf
@property
def id(self):
return self._id
@property
def index(self):
if not hasattr(self, "_index"):
self._index = np.arange(len(self._id))
return self._index
@property
def crs(self):
return self._crs
@property
def values(self):
return self._values
@property
def coords(self):
return self._coords
@property
def coord(self):
return self.coords
def get_index_by_id(self, id: Hashable):
if not hasattr(self, "node_id_to_index"):
self.node_id_to_index = {self.id[i]: i for i in range(len(self.id))}
return self.node_id_to_index[id]
def get_id_by_index(self, index: int):
if not hasattr(self, "node_index_to_id"):
self.node_index_to_id = {i: self.id[i] for i in range(len(self.id))}
return self.node_index_to_id[index]
def to_dict(self):
nodes = {
nid: (coo, val)
for nid, coo, val in zip(self._id, self._coords, self.values)
}
return nodes
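# Hedged usage sketch for Nodes (ids, coordinates, and values are made up):
def _demo_nodes():
    nodes = Nodes({1: [(0.0, 0.0), -5.0], 2: [(1.0, 0.0), -4.0]}, crs="EPSG:4326")
    assert nodes.get_index_by_id(1) == 0
    assert nodes.get_id_by_index(1) == 2
    assert nodes.coords.shape == (2, 2)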
class Elements:
def __init__(self, nodes: Nodes, elements: Dict[Hashable, Sequence]):
if not isinstance(elements, dict):
raise TypeError("Argument elements must be a dict.")
vertex_id_set = set(nodes.id)
for id, geom in elements.items():
if not isinstance(geom, Sequence):
raise TypeError(
f"Element with id {id} of the elements "
f"argument must be of type {Sequence}, not "
f"type {type(geom)}."
)
if not set(geom).issubset(vertex_id_set):
raise ValueError(
f"Element with id {id} is not a subset of the coordinate id's."
)
self.nodes = nodes
self.elements = elements
def __len__(self):
return len(self.elements)
def to_dict(self):
return self.elements
@property
def id(self):
if not hasattr(self, "_id"):
self._id = list(self.elements.keys())
return self._id
@property
def index(self):
if not hasattr(self, "_index"):
self._index = np.arange(len(self.elements))
return self._index
def get_index_by_id(self, id: Hashable):
if not hasattr(self, "element_id_to_index"):
self.element_id_to_index = {self.id[i]: i for i in range(len(self.id))}
return self.element_id_to_index[id]
def get_id_by_index(self, index: int):
if not hasattr(self, "element_index_to_id"):
self.element_index_to_id = {i: self.id[i] for i in range(len(self.id))}
return self.element_index_to_id[index]
def get_indexes_around_index(self, index):
if not hasattr(self, "indexes_around_index"):
def append_geom(geom):
for simplex in geom:
for i, j in permutations(simplex, 2):
indexes_around_index[i].add(j)
indexes_around_index = defaultdict(set)
append_geom(self.triangles)
append_geom(self.quads)
self.indexes_around_index = indexes_around_index
return list(self.indexes_around_index[index])
def get_ball(self, order: int, id=None, index=None):
if not isinstance(order, int):
raise TypeError("Argument 'order' must be of type int.")
if not order >= 0:
raise TypeError("Argument 'order' must be of greater " "than zero.")
if id is None and index is None:
raise ValueError("Must specify one keyword argument of index or id.")
if id is not None and index is not None:
raise ValueError(
"Must specify only one keyword argument of " "index or id."
)
if id is not None:
index = self.get_index_by_id(id)
eidxs = set([index])
for i in range(order):
elements = self.array[list(sorted(eidxs)), :]
new_neighbors = list(
map(self.get_indexes_around_index, list(set(elements.data.flatten())))
)
new_neighbors = set([item for sublist in new_neighbors for item in sublist])
eidxs = eidxs.union(
set(
np.where(
np.logical_and(
np.any(
np.isin(self.array, list(set(new_neighbors))), axis=1
),
np.any(np.isin(self.array, elements), axis=1),
)
)[0]
)
)
return self.gdf.loc[eidxs].geometry.unary_union.exterior
def get_node_ball(self):
'''
compute nodal ball information
'''
elnode = self.array
NP = len(self.nodes.values)
nne = np.zeros(NP).astype('int')
ine = [[] for i in np.arange(NP)]
mask = np.any(elnode.mask)
for i, element in enumerate(elnode):
ele = element[~element.mask]
if mask:
i34 = len(ele)
else:
i34 = len(ele[0])
inds = elnode[i, :i34]
nne[inds] = nne[inds]+1
for indi in inds:
    ine[indi].append(i)
ine = np.array([np.array(ine[i]) for i in np.arange(NP)], dtype='O')
return nne, ine
def get_triangulation_mask(self, element_mask):
triangulation_mask = []
for i, element in enumerate(self.elements.values()):
if len(element) == 3:
if element_mask[i]:
triangulation_mask.append(True)
else:
triangulation_mask.append(False)
for i, element in enumerate(self.elements.values()):
if len(element) == 4:
if element_mask[i]:
triangulation_mask.append(True)
triangulation_mask.append(True)
else:
triangulation_mask.append(False)
triangulation_mask.append(False)
return np.array(triangulation_mask)
def get_areas(self):
if self.nodes.crs.is_geographic:
elements = []
for row in self.gdf.itertuples():
aeqd = CRS.from_user_input(
f"+proj=aeqd +R=6371000 +units=m "
f"+lat_0={row.geometry.centroid.y} +lon_0={row.geometry.centroid.x}"
)
current_to_aeqd = Transformer.from_crs(
self.nodes.crs, aeqd, always_xy=True
).transform
elements.append(ops.transform(current_to_aeqd, row.geometry))
return [element.area for element in elements]
else:
return [row.geometry.area for row in self.gdf.itertuples()]
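# Design note (illustrative): geographic CRSs measure in degrees, so the
# branch above re-projects each element into a local azimuthal-equidistant
# frame centred on its own centroid before taking .area, yielding areas in
# square metres instead of meaningless square degrees.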
@property
def array(self):
if not hasattr(self, "_array"):
rank = int(max(map(len, self.elements.values())))
array = np.full((len(self.elements), rank), -1)
for i, element in enumerate(self.elements.values()):
row = np.array(list(map(self.nodes.get_index_by_id, element)))
array[i, : len(row)] = row
array = np.ma.masked_equal(array, -1)
self._array = array
return self._array
@property
def triangles(self):
if not hasattr(self, "_triangles"):
self._triangles = np.array(
[
list(map(self.nodes.get_index_by_id, element))
for element in self.elements.values()
if len(element) == 3
]
)
return self._triangles
@property
def tri_idxs(self):
if not hasattr(self, "_tri_idxs"):
tri_idxs = np.full(len(self.elements), -99, dtype=int)
for i, element in enumerate(self.elements.values()):
if len(element) == 3:
tri_idxs[i] = i
new_arr = np.delete(tri_idxs, np.where(tri_idxs == -99))
self._tri_idxs = new_arr
return self._tri_idxs
@property
def quadrilaterals(self):
return self.quads
@property
def quads(self):
if not hasattr(self, "_quads"):
self._quads = np.array(
[
list(map(self.nodes.get_index_by_id, element))
for element in self.elements.values()
if len(element) == 4
]
)
return self._quads
@property
def qua_idxs(self):
if not hasattr(self, "_qua_idxs"):
qua_idxs = np.full(len(self.elements), -99, dtype=int)
for i, element in enumerate(self.elements.values()):
if len(element) == 4:
qua_idxs[i] = i
new_arr = np.delete(qua_idxs, np.where(qua_idxs == -99))
self._qua_idxs = new_arr
return self._qua_idxs
@property
def sides(self):
if not hasattr(self, "_sides"):
sides = []
for element in self.elements.values():
if len(element) == 3:
results = list(map(self.nodes.get_index_by_id, element))
# print(len(results))
# print(f'nodes are {results[0]}, {results[1]}, {results[2]}')
sides.append([results[1], results[2]])
sides.append([results[2], results[0]])
sides.append([results[0], results[1]])
elif len(element) == 4:
results = list(map(self.nodes.get_index_by_id, element))
# print(f'nodes are {p1}, {p2}, {p3}, {p4}')
sides.append([results[1], results[2]])
sides.append([results[2], results[3]])
sides.append([results[3], results[0]])
sides.append([results[0], results[1]])
# from pyPoseidon
def remove_reversed_duplicates(iterable):
# Create a set for already seen elements
seen = set()
for item in iterable:
# Lists are mutable so we need tuples for the set-operations.
tup = tuple(item)
if tup not in seen:
# If the tuple is not in the set append it in REVERSED order.
seen.add(tup[::-1])
# If you also want to remove normal duplicates uncomment the next line
# seen.add(tup)
yield item
self._sides = np.array(list(remove_reversed_duplicates(sides)))
return self._sides
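# Illustration of remove_reversed_duplicates above (values are made up):
# feeding [[1, 2], [2, 1], [2, 3]] through the generator yields
# [[1, 2], [2, 3]], because [2, 1] is the reverse of the already-seen [1, 2].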
@property
def triangulation(self):
if not hasattr(self, "_triangulation"):
triangles = self.triangles.tolist()
for quad in self.quads:
triangles.append([quad[0], quad[1], quad[3]])
triangles.append([quad[1], quad[2], quad[3]])
self._triangulation = Triangulation(
self.nodes.coord[:, 0], self.nodes.coord[:, 1], triangles
)
return self._triangulation
@property
def gdf(self):
if not hasattr(self, "_gdf"):
logger.info("Generating elements geodataframe.")
from time import time
start = time()
data = []
for id, element in self.elements.items():
data.append(
{
"geometry": Polygon(
self.nodes.coord[list(map(self.get_index_by_id, element))]
),
"id": id,
}
)
self._gdf = gpd.GeoDataFrame(data, crs=self.nodes.crs)
logger.info(
"Generating elements geodataframe took " f"{time()-start} seconds."
)
return self._gdf
class Edges:
def __init__(self, grd: "Gr3"):
self.gr3 = grd
@lru_cache(maxsize=1)
def __call__(self) -> gpd.GeoDataFrame:
data = []
for ring in self.gr3.hull.rings().itertuples():
coords = ring.geometry.coords
for i in range(1, len(coords)):
data.append(
{
"geometry": LineString([coords[i - 1], | |
import collections
import functools
import os
import pickle
from typing import (Callable, Dict, Hashable, List, NamedTuple, Optional,
Sequence, Union)
import numpy as np
from stable_baselines.common.base_class import BaseRLModel
from stable_baselines.common.policies import BasePolicy
from stable_baselines.common.vec_env import VecEnv
import tensorflow as tf
from imitation.policies.base import get_action_policy
from imitation.util.reward_wrapper import RewardFn
class Trajectory(NamedTuple):
"""A trajectory, e.g. a one episode rollout from an expert policy.
Attributes:
acts: Actions, shape (trajectory_len, ) + action_shape.
obs: Observations, shape (trajectory_len+1, ) + observation_shape.
rews: Reward, shape (trajectory_len, ).
infos: A list of info dicts, length (trajectory_len, ).
"""
acts: np.ndarray
obs: np.ndarray
rews: np.ndarray
infos: Optional[List[dict]]
def unwrap_traj(traj: Trajectory) -> Trajectory:
"""Uses `MonitorPlus`-captured `obs` and `rews` to replace fields.
This can be useful for bypassing other wrappers to retrieve the original
`obs` and `rews`.
Fails if `infos` is None or if the Trajectory was generated from an
environment without imitation.util.MonitorPlus.
Args:
traj: A Trajectory generated from `MonitorPlus`-wrapped Environments.
Returns:
A copy of `traj` with replaced `obs` and `rews` fields.
"""
ep_info = traj.infos[-1]["episode"]
res = traj._replace(obs=ep_info["obs"], rews=ep_info["rews"])
assert len(res.obs) == len(res.acts) + 1
assert len(res.rews) == len(res.acts)
return res
def recalc_rewards_traj(traj: Trajectory, reward_fn: RewardFn) -> np.ndarray:
"""Returns the rewards of the trajectory calculated under a diff reward fn."""
steps = np.arange(len(traj.rews))
return reward_fn(traj.obs[:-1], traj.acts, traj.obs[1:], steps)
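# Hedged sketch of recalc_rewards_traj with a constant reward function (all
# names here are local to the example, not part of the module API):
def _demo_recalc_rewards() -> None:
    traj = Trajectory(
        acts=np.zeros((3, 1)),
        obs=np.zeros((4, 2)),
        rews=np.ones(3),
        infos=None,
    )
    constant_two: RewardFn = lambda obs, acts, next_obs, steps: np.full(len(acts), 2.0)
    assert (recalc_rewards_traj(traj, constant_two) == 2.0).all()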
class Transitions(NamedTuple):
"""A batch of obs-act-obs-rew-done transitions.
Usually generated by combining and processing several Trajectories via
`flatten_trajectories()`.
Attributes:
obs: Previous observations. Shape: (batch_size, ) + observation_shape.
The i'th observation `obs[i]` in this array is the observation seen
by the agent when choosing action `acts[i]`.
acts: Actions. Shape: (batch_size, ) + action_shape.
next_obs: New observations. Shape: (batch_size, ) + observation_shape.
The i'th observation `next_obs[i]` in this array is the observation
after the agent has taken action `acts[i]`.
rews: Rewards. Shape: (batch_size, ).
The reward `rews[i]` at the i'th timestep is received after the agent has
taken action `acts[i]`.
dones: Boolean array indicating episode termination. Shape: (batch_size, ).
`dones[i]` is true iff `next_obs[i]` is the last observation of an episode.
"""
obs: np.ndarray
acts: np.ndarray
next_obs: np.ndarray
rews: np.ndarray
dones: np.ndarray
class TrajectoryAccumulator:
"""Accumulates trajectories step-by-step.
Useful for collecting completed trajectories while ignoring
partially-completed trajectories (e.g. when rolling out a VecEnv to collect a
set number of transitions). Each in-progress trajectory is identified by a
'key', which enables several independent trajectories to be collected at
once. The key can also be left at its default value of `None` if you only
wish to collect one trajectory."""
def __init__(self):
"""Initialise the trajectory accumulator."""
self.partial_trajectories = collections.defaultdict(list)
def add_step(self, step_dict: Dict[str, np.ndarray], key: Hashable = None):
"""Add a single step to the partial trajectory identified by `key`.
Generally a single step could correspond to, e.g., one environment managed
by a VecEnv.
Args:
step_dict: dictionary containing information for the current step. Its
keys could include any (or all) attributes of a `Trajectory` (e.g.
"obs", "acts", etc.).
key: key to uniquely identify the trajectory to append to, if working
with multiple partial trajectories."""
self.partial_trajectories[key].append(step_dict)
def finish_trajectory(self, key: Hashable = None) -> Trajectory:
"""Complete the trajectory labelled with `key`.
Args:
key: key uniquely identifying which in-progress trajectory to remove.
Returns:
traj: list of completed trajectories popped from
`self.partial_trajectories`."""
part_dicts = self.partial_trajectories[key]
del self.partial_trajectories[key]
out_dict_unstacked = collections.defaultdict(list)
for part_dict in part_dicts:
for step_key, array in part_dict.items():  # avoid shadowing the `key` argument
out_dict_unstacked[step_key].append(array)
out_dict_stacked = {
key: np.stack(arr_list, axis=0)
for key, arr_list in out_dict_unstacked.items()
}
traj = Trajectory(**out_dict_stacked)
assert traj.rews.shape[0] == traj.acts.shape[0] == traj.obs.shape[0] - 1
return traj
def add_steps_and_auto_finish(self,
acts: np.ndarray,
obs: np.ndarray,
rews: np.ndarray,
dones: np.ndarray,
infos: List[dict]) -> List[Trajectory]:
"""Calls `add_step` repeatedly using acts and the returns from `venv.step`.
Also automatically calls `finish_trajectory()` for each `done == True`.
Before calling this method, each environment index key needs to be
initialized with the initial observation (usually from `venv.reset()`).
See the body of `util.rollout.generate_trajectory` for an example.
Args:
acts: Actions passed into `VecEnv.step()`.
obs: Return value from `VecEnv.step(acts)`.
rews: Return value from `VecEnv.step(acts)`.
dones: Return value from `VecEnv.step(acts)`.
infos: Return value from `VecEnv.step(acts)`.
Returns:
A list of completed trajectories. There should be one Trajectory for
each `True` in the `dones` argument.
"""
trajs = []
for env_idx in range(len(obs)):
assert env_idx in self.partial_trajectories
assert list(self.partial_trajectories[env_idx][0].keys()) == ["obs"], (
"Need to first initialize partial trajectory using "
"self._traj_accum.add_step({'obs': ob}, key=env_idx)")
zip_iter = enumerate(zip(acts, obs, rews, dones, infos))
for env_idx, (act, ob, rew, done, info) in zip_iter:
if done:
# actual obs is inaccurate, so we use the one inserted into step info
# by stable baselines wrapper
real_ob = info['terminal_observation']
else:
real_ob = ob
self.add_step(
dict(
acts=act,
rews=rew,
# this is not the obs corresponding to `act`, but rather the obs
# *after* `act` (see above)
obs=real_ob,
infos=info),
env_idx)
if done:
# finish env_idx-th trajectory
new_traj = self.finish_trajectory(env_idx)
trajs.append(new_traj)
self.add_step(dict(obs=ob), env_idx)
return trajs
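# Usage sketch (hedged): collecting a single two-step episode under the
# default key, mirroring the seeding requirement documented in
# `add_steps_and_auto_finish`. All values here are dummy arrays.
def _example_accumulate() -> Trajectory:
    acc = TrajectoryAccumulator()
    acc.add_step(dict(obs=np.zeros(3)))  # seed with the initial observation
    for _ in range(2):
        acc.add_step(dict(acts=np.zeros(1), rews=0.0, obs=np.zeros(3), infos={}))
    return acc.finish_trajectory()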
GenTrajTerminationFn = Callable[[Sequence[Trajectory]], bool]
def min_episodes(n: int) -> GenTrajTerminationFn:
"""Terminate after collecting n episodes of data.
Args:
n: Minimum number of episodes of data to collect.
May overshoot if two episodes complete simultaneously (unlikely).
Returns:
A function implementing this termination condition.
"""
assert n >= 1
return lambda trajectories: len(trajectories) >= n
def min_timesteps(n: int) -> GenTrajTerminationFn:
"""Terminate at the first episode after collecting n timesteps of data.
Args:
n: Minimum number of timesteps of data to collect.
May overshoot to nearest episode boundary.
Returns:
A function implementing this termination condition.
"""
assert n >= 1
def f(trajectories: Sequence[Trajectory]):
timesteps = sum(len(t.obs) - 1 for t in trajectories)
return timesteps >= n
return f
def make_sample_until(n_timesteps: Optional[int],
n_episodes: Optional[int],
) -> GenTrajTerminationFn:
"""Returns a termination condition sampling until n_timesteps or n_episodes.
Args:
n_timesteps: Minimum number of timesteps to sample.
n_episodes: Minimum number of episodes to sample.
Returns:
A termination condition.
Raises:
ValueError if both or neither of n_timesteps and n_episodes are set,
or if either is non-positive.
"""
if n_timesteps is not None and n_episodes is not None:
raise ValueError("n_timesteps and n_episodes were both set")
elif n_timesteps is not None:
assert n_timesteps > 0
return min_timesteps(n_timesteps)
elif n_episodes is not None:
assert n_episodes > 0
return min_episodes(n_episodes)
else:
raise ValueError("Set at least one of n_timesteps and n_episodes")
def generate_trajectories(policy,
venv: VecEnv,
sample_until: GenTrajTerminationFn,
*,
deterministic_policy: bool = False,
) -> Sequence[Trajectory]:
"""Generate trajectory dictionaries from a policy and an environment.
Args:
policy (BasePolicy or BaseRLModel): A stable_baselines policy or RLModel,
trained on the gym environment.
venv: The vectorized environments to interact with.
sample_until: A function determining the termination condition.
It takes a sequence of trajectories, and returns a bool.
Most users will want to use one of `min_episodes` or `min_timesteps`.
deterministic_policy: If True, asks policy to deterministically return
action. Note the trajectories might still be non-deterministic if the
environment has non-determinism!
Returns:
Sequence of `Trajectory` named tuples.
"""
if isinstance(policy, BaseRLModel):
get_action = policy.predict
policy.set_env(venv)
else:
get_action = functools.partial(get_action_policy, policy)
# Collect rollout tuples.
trajectories = []
# accumulator for incomplete trajectories
trajectories_accum = TrajectoryAccumulator()
obs = venv.reset()
for env_idx, ob in enumerate(obs):
# Seed with first obs only. Inside loop, we'll only add second obs from
# each (s,a,r,s') tuple, under the same "obs" key again. That way we still
# get all observations, but they're not duplicated into "next obs" and
# "previous obs" (this matters for, e.g., Atari, where observations are
# really big).
trajectories_accum.add_step(dict(obs=ob), env_idx)
while not sample_until(trajectories):
acts, _ = get_action(obs, deterministic=deterministic_policy)
obs, rews, dones, infos = venv.step(acts)
new_trajs = trajectories_accum.add_steps_and_auto_finish(
acts, obs, rews, dones, infos)
trajectories.extend(new_trajs)
# Note that we just drop partial trajectories. This is not ideal for some
# algos; e.g. BC can probably benefit from partial trajectories, too.
# Sanity checks.
for trajectory in trajectories:
n_steps = len(trajectory.acts)
# extra 1 for the end
exp_obs = (n_steps + 1, ) + venv.observation_space.shape
real_obs = trajectory.obs.shape
assert real_obs == exp_obs, f"expected shape {exp_obs}, got {real_obs}"
exp_act = (n_steps, ) + venv.action_space.shape
real_act = trajectory.acts.shape
assert real_act == exp_act, f"expected shape {exp_act}, got {real_act}"
exp_rew = (n_steps,)
real_rew = trajectory.rews.shape
assert real_rew == exp_rew, f"expected shape {exp_rew}, got {real_rew}"
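# Usage sketch (hedged): rolling out a trained stable-baselines model on a
# vectorised environment; `model` and `venv` are assumed to be provided by
# the caller.
def _example_generate(model, venv: VecEnv) -> Sequence[Trajectory]:
    return generate_trajectories(model, venv, sample_until=min_episodes(10),
                                 deterministic_policy=True)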
@commands.command()
async def lolice(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None):
"""The lolice is coming to your house"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024))
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/lolice', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "lolice.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://lolice.png")
await ctx.reply(embed=em,file=img) # sending the file
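# Hypothetical refactor sketch (not part of the original cog): every canvas
# command in this file repeats the same fetch-image-then-embed dance, which
# could be factored into one helper like this. The name `_canvas_reply` and
# its parameters are illustrative assumptions, not existing bot API.
async def _canvas_reply(self, ctx, endpoint: str, filename: str, parameters: dict):
    session = self.acquire_session()
    async with session.get(f"https://some-random-api.ml/canvas/{endpoint}", params=parameters) as resp:
        image_data = io.BytesIO(await resp.read())  # read the image bytes
    img = discord.File(image_data, filename)
    em = discord.Embed(color=0xf1f1f1)
    em.set_image(url=f"attachment://{filename}")
    await ctx.reply(embed=em, file=img)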
@commands.command(aliases=["sputid"])
async def stupid(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None, *, text: str = "im stupid"):
"""Oh no its stupid"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024)),
"dog" : text
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/its-so-stupid', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "stupid.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://stupid.png")
await ctx.reply(embed=em,file=img) # sending the file
@commands.command()
async def gay(self, ctx, member: libneko.converters.InsensitiveMemberConverter=None):
"""gay-laser"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024))
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/gay', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "gay.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://gay.png")
await ctx.reply(embed=em,file=img) # sending the file
@commands.command(aliases=["ussr"])
async def comrade(self, ctx, member: libneko.converters.InsensitiveMemberConverter=None):
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024))
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/comrade', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "comrade.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://comrade.png")
await ctx.reply(embed=em,file=img) # sending the file
@commands.command(aliases=["ytc"])
async def ytcomment(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None, *, msg: str = "Never gonna give you up!"):
"""Create a fake youtube comment"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024)),
"username" : member.name,
"comment" : msg
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/youtube-comment', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "comment.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://comment.png")
await ctx.reply(embed=em,file=img) # sending the file
@commands.command(aliases=["tw"])
async def tweet(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None, *, message: str = "Never gonna give you up!"):
"""Create a fake tweet!"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024)),
"username" : member.name,
"displayname" : member.display_name or member.name,
"comment" : message
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/tweet', params=parameters) as resp:
imageData = io.BytesIO(await resp.read()) # read the image/bytes
img = discord.File(imageData, "tweet.png")
em = discord.Embed(
color=0xf1f1f1,
)
em.set_image(url="attachment://tweet.png")
await ctx.reply(embed=em,file=img) # sending the file
@commands.command()
async def horny(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None):
"""Horny card for u"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"avatar" : str(member.avatar_url_as(format="png", size=1024))
}
session = self.acquire_session()
async with session.get(f'https://some-random-api.ml/canvas/horny', params = parameters) as af:
if 300 > af.status >= 200:
fp = io.BytesIO(await af.read())
file = discord.File(fp, "horny.png")
em = discord.Embed(
title="bonk",
color=0xf1f1f1,
)
em.set_image(url="attachment://horny.png")
await ctx.reply(embed=em, file=file)
else:
await ctx.reply('No horny :(')
@commands.command(aliases=["adv"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def advice(self, ctx):
"""
Get a piece of Advice!
"""
await ctx.trigger_typing()
session = self.acquire_session()
async with session.get(f'https://api.adviceslip.com/advice') as resp:
resp.raise_for_status()
data = json.loads(await resp.read(), object_hook=DictObject)
adv = data.slip.advice
emb = discord.Embed(title="Here's some advice for you :)", description=adv,
color=ctx.author.color, timestamp=datetime.utcnow())
await ctx.reply(embed=emb)
@commands.command(aliases=["randquote", "inspire", "qt"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def quote(self, ctx):
"""
Get a random quote!
"""
await ctx.trigger_typing()
session = self.acquire_session()
async with session.get('https://quote-garden.herokuapp.com/api/v3/quotes/random') as resp:
resp.raise_for_status()
data = await resp.json()
quote = data["data"][0]["quoteText"]
author = data["data"][0]["quoteAuthor"]
emb = discord.Embed(
description=quote, color=ctx.author.color, timestamp=datetime.utcnow())
emb.set_footer(text=f"Quote by: {author}")
await ctx.reply(embed=emb)
@commands.command(aliases=["daddyjokes", "dadjoke", "djoke"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def dadjokes(self, ctx):
"""
Send a Dad Joke (OK Boomer.)
"""
await ctx.trigger_typing()
head = {"Accept": "application/json",
"User-Agent": "KamFreBOT(Discord.py) https://github.com/kamfretoz/KamFreBOT"
}
session = self.acquire_session()
async with session.get('https://icanhazdadjoke.com/', headers=head) as resp:
resp.raise_for_status()
data = await resp.json()
jokes = data["joke"]
emb = discord.Embed(title="Dad Joke!", description=jokes,
timestamp=datetime.utcnow(), color=ctx.author.color)
emb.set_thumbnail(url="https://i.ibb.co/6WjYXsP/dad.jpg")
await ctx.reply(embed=emb)
@commands.command(aliases=["chnorris", "chnr", "cn", "chuck"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def chucknorris(self, ctx):
"""
You didn't run this command; Chuck Norris threw this command at your face.
"""
await ctx.trigger_typing()
session = self.acquire_session()
async with session.get('https://api.chucknorris.io/jokes/random') as resp:
resp.raise_for_status()
data = await resp.json()
joke = data["value"]
icon = data["icon_url"]
emb = discord.Embed(
description=joke, timestamp=datetime.utcnow(), color=0x8B0000)
emb.set_thumbnail(url=icon)
await ctx.reply(embed=emb)
@commands.command(aliases=["insult"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def roast(self, ctx, member: libneko.converters.InsensitiveMemberConverter = None):
"""
Roasting simulator 2077
"""
member = member or ctx.author
await ctx.trigger_typing()
parameters = {
"lang" : "en",
"type" : "json"
}
session = self.acquire_session()
async with session.get('https://evilinsult.com/generate_insult.php', params = parameters) as resp:
resp.raise_for_status()
data = await resp.json()
insult = data["insult"]
await ctx.reply(content=f"{member.mention}, {insult}")
@commands.command(aliases=["joke"])
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def jokes(self, ctx):
"""
For All kinds of jokes!
Might contain NSFW and offensive ones.
"""
await ctx.trigger_typing()
parameters = {
"format": "json",
"amount": 1
}
session = self.acquire_session()
async with session.get(f'https://v2.jokeapi.dev/joke/Any', params = parameters) as resp:
resp.raise_for_status()
data = await resp.json()
if data.get("error"):
    return await ctx.reply("An error has occurred!")
emb = discord.Embed(title="Here comes a joke!")
jokecategory = data["category"]
thetype = data["type"]
if thetype == "twopart":
    setup = data["setup"]
    delivery = data["delivery"]
    emb.add_field(name=f"Category: **{jokecategory}**", value=f"{setup}\n{delivery}")
if thetype == "single":
    joke = data["joke"]
    emb.add_field(name=f"Category: **{jokecategory}**", value=joke)
await ctx.reply(embed=emb, content=None)
@commands.command(aliases=["succ"], hidden=True)
async def zucc(self, ctx):
"""Gives you the zucc"""
zuccy = discord.Embed()
zuccy.set_image(
url="https://pics.me.me/he-protec-he-attac-but-most-importantly-he-zucc-28716903.png"
)
await ctx.reply(embed=zuccy, content="<:zucc:451945809144184862>")
@commands.command(hidden=True, aliases=["pelota"])
async def bola(self, ctx):
"""Bola"""
def_bola = "https://i.ibb.co/87j54jp/bola.png"
def_pelota = "https://cdn.discordapp.com/attachments/617178714173603867/743032290682077184/1597223408911.png"
if ctx.invoked_with == "pelota":
pel = discord.Embed()
pel.set_image(url=def_pelota)
await ctx.reply(embed=pel)
return
bol = discord.Embed()
bol.set_image(url=def_bola)
await ctx.reply(embed=bol)
@commands.command(hidden=True)
async def interject(self, ctx):
"""What you’re referring to as Linux, is in fact, GNyU/Linux, or as I’ve recentwy taken to cawwing it, GNyU pwus Linyux."""
uwu = discord.Embed(
description="||[Yes](https://www.youtube.com/watch?v=QXUSvSUsx80)||")
uwu.set_image(
url="https://i.ytimg.com/vi/QXUSvSUsx80/maxresdefault.jpg"
)
await ctx.reply(embed=uwu)
@commands.command(hidden=True, aliases=["banned"])
async def banido(self, ctx):
"""You are Banned!"""
ban = discord.Embed(description="You have been banned!")
ban.set_image(
url="https://media1.tenor.com/images/8a7663d1d754046373a5735fab9c14fa/tenor.gif"
)
await ctx.reply(embed=ban)
@commands.command(hidden=True, aliases=["distraction"])
async def distract(self, ctx):
"""Really?"""
dis = discord.Embed(description="You have been distracted.")
dis.set_image(
url="https://i.ibb.co/1ZHX2SZ/stickdancin.gif"
)
await ctx.reply(embed=dis)
@commands.command(hidden=True, aliases=["rw"])
async def rewind(self, ctx):
"""Rewind the time!"""
imgs = [
"https://media1.tenor.com/images/d29dc08bce25f5de5051ad2f6d3b5a99/tenor.gif",
"https://media1.tenor.com/images/3619126efbfc2d3f15eb60cabd6457ea/tenor.gif"
]
rew = discord.Embed(description="YAAAAA IT'S REWIND TIME!")
rew.set_image(
url=choice(imgs)
)
await ctx.reply(embed=rew)
@commands.cooldown(rate=1, per=10, type=commands.BucketType.guild)
@commands.command(name="curse", aliases=("oppugno", "jynx", "kutuk", "santet"))
async def emoji_curse(self, ctx, user: libneko.converters.InsensitiveMemberConverter = None, emoji: str = None):
"""
Curse someone with an emoji for 30 minutes
"""
if user is None and emoji is None:
await ctx.reply(embed=discord.Embed(description="Please specify who to curse and with what emoji!"))
return
if emoji is None:
await ctx.reply(embed=discord.Embed(description="Please specify what emoji to use!"))
return
if user.id == ctx.bot.user.id:
user = ctx.message.author
await ctx.reply(embed=discord.Embed(description="HA! Nice try! But unfortunately i'm immune to the curse and so the curse goes back to sender!"))
emoji = (
self.bot.get_emoji(int(emoji.split(":")[2].strip(">")))
if "<:" in emoji or "<a:" in emoji
else emoji
)
cursed = self.jynxed.get(f"{user.id}@{ctx.guild.id}")
if cursed is not None:
await ctx.channel.send(
embed=embeds.Embed(
description=f"{user.mention} is already cursed!",
color=discord.Colour.dark_purple(),
)
)
else:
try:
await ctx.message.add_reaction(emoji)
except discord.DiscordException:
await ctx.reply(
embed=embeds.Embed(
description=":octagonal_sign: Cannot find that emoji!",
color=discord.Colour.red(),
)
)
else:
def check(msg):
return ctx.guild.id == msg.guild.id and msg.author.id == user.id
async def curse_task(self):
await ctx.channel.send(
embed=embeds.Embed(
description=f":purple_heart: {user.mention} Has been cursed with {emoji}. The effect will fade away in 30 minutes.",
color=discord.Colour.purple(),
)
)
start = time.monotonic()
while time.monotonic() - start < 1800:
msg = await self.bot.wait_for("message", check=check)
try:
await msg.add_reaction(emoji)
except discord.DiscordException:
pass
del self.jynxed[f"{user.id}@{ctx.guild.id}"]
curse = self.bot.loop.create_task(curse_task(self))
self.jynxed.update({f"{user.id}@{ctx.guild.id}": curse})
@commands.command(name="bless", aliases=("ruqyah", "finitincantatem", "countercurse"), hidden=False)
async def emoji_bless(self, ctx, user: libneko.converters.InsensitiveMemberConverter):
"""Cure someone from a curse"""
cursed = self.jynxed.get(f"{user.id}@{ctx.guild.id}")
if user == ctx.author and user != self.bot.creator:
await ctx.reply(
embed=embeds.Embed(
description=":octagonal_sign: You cannot counter-curse yourself",
color=discord.Colour.red(),
)
)
elif cursed is not None:
cursed.cancel()
del self.jynxed[f"{user.id}@{ctx.guild.id}"]
await ctx.reply(
embed=embeds.Embed(
description=f":green_heart: {user.mention} Has been blessed and the curse had faded away",
color=discord.Colour.from_rgb(55, 147, 105),
)
)
else:
await ctx.reply(
embed=embeds.Embed(
description=f":octagonal_sign: {user.mention} is not cursed!",
color=discord.Colour.red(),
)
)
@commands.command()
@commands.bot_has_permissions(embed_links=True, add_reactions=True)
@commands.cooldown(rate=3, per=5, type=commands.BucketType.user)
async def xkcd(self, ctx, *, entry_number: int = None):
"""Post a random xkcd comic"""
await ctx.trigger_typing()
import logging
import time
from datetime import datetime as dt
import pytz
from colorfield.fields import ColorField
from django.contrib.auth.models import Group
from django.contrib.gis.db import models
from django.core.validators import MinValueValidator, MaxValueValidator
from django.urls import reverse
from django.utils.timezone import now
from wx.enums import FlashTypeEnum
class BaseModel(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class Decoder(BaseModel):
name = models.CharField(
max_length=40
)
description = models.CharField(
max_length=256
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class Country(BaseModel):
code = models.CharField(max_length=2)
name = models.CharField(max_length=256, unique=True)
class Meta:
verbose_name_plural = "countries"
def __str__(self):
return self.name
class Interval(BaseModel):
symbol = models.CharField(max_length=8)
description = models.CharField(max_length=40)
default_query_range = models.IntegerField(default=0)
seconds = models.IntegerField(null=True)
class Meta:
ordering = ('symbol',)
def __str__(self):
return self.symbol
class PhysicalQuantity(BaseModel):
name = models.CharField(
max_length=16,
unique=True
)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class MeasurementVariable(BaseModel):
name = models.CharField(
max_length=40,
unique=True
)
physical_quantity = models.ForeignKey(
PhysicalQuantity,
on_delete=models.DO_NOTHING
)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class CodeTable(BaseModel):
name = models.CharField(
max_length=45,
unique=True
)
description = models.CharField(
max_length=256,
)
def __str__(self):
return self.name
class Unit(BaseModel):
symbol = models.CharField(
max_length=16,
unique=True
)
name = models.CharField(
max_length=256,
unique=True
)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class SamplingOperation(BaseModel):
"""Old sampling_operations table"""
symbol = models.CharField(
max_length=5,
unique=True
)
name = models.CharField(
max_length=40,
unique=True
)
class Meta:
ordering = ('symbol',)
def __str__(self):
return self.name
class Variable(BaseModel):
"""Old element table"""
variable_type = models.CharField(
max_length=40,
)
symbol = models.CharField(
max_length=8,
)
name = models.CharField(
max_length=40,
)
sampling_operation = models.ForeignKey(
SamplingOperation,
on_delete=models.DO_NOTHING,
null=True,
blank=True,
)
measurement_variable = models.ForeignKey(
MeasurementVariable,
on_delete=models.DO_NOTHING,
null=True,
blank=True,
)
unit = models.ForeignKey(
Unit,
on_delete=models.DO_NOTHING,
null=True,
blank=True,
)
precision = models.IntegerField(
null=True,
blank=True,
)
scale = models.IntegerField(
null=True,
blank=True,
)
code_table = models.ForeignKey(
CodeTable,
on_delete=models.DO_NOTHING,
null=True,
blank=True,
)
color = ColorField(default='#FF0000', null=True, blank=True)
range_min = models.IntegerField(
null=True,
blank=True,
)
range_max = models.IntegerField(
null=True,
blank=True,
)
default_representation = models.CharField(
max_length=60,
null=True,
blank=True,
default='line',
choices=[('line', 'Line'), ('point', 'Point'), ('bar', 'Bar'), ('column', 'Column')])
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
class DataSource(BaseModel):
symbol = models.CharField(max_length=8, unique=True)
name = models.CharField(max_length=32, unique=True)
base_url = models.URLField(null=True)
location = models.CharField(max_length=256, null=True)
class Meta:
verbose_name = "data source"
verbose_name_plural = "data sources"
def __str__(self):
return self.name
class StationProfile(BaseModel):
name = models.CharField(max_length=45)
description = models.CharField(max_length=256)
color = models.CharField(max_length=7)
is_automatic = models.BooleanField(default=False)
is_manual = models.BooleanField(default=True)
class Meta:
verbose_name = "station profile"
verbose_name_plural = "station profiles"
def __str__(self):
return self.name
class AdministrativeRegionType(BaseModel):
name = models.CharField(max_length=45)
class Meta:
verbose_name = "administrative region type"
verbose_name_plural = "administrative region types"
def __str__(self):
return self.name
class AdministrativeRegion(BaseModel):
name = models.CharField(max_length=45)
country = models.ForeignKey(Country, on_delete=models.DO_NOTHING)
administrative_region_type = models.ForeignKey(AdministrativeRegionType, on_delete=models.DO_NOTHING)
class Meta:
verbose_name = "administrative region"
verbose_name_plural = "administrative regions"
def __str__(self):
return self.name
class StationType(BaseModel):
name = models.CharField(max_length=45)
description = models.CharField(max_length=256)
parent_type = models.ForeignKey('self', on_delete=models.DO_NOTHING, null=True)
class Meta:
verbose_name = "station type"
verbose_name_plural = "station types"
def __str__(self):
return self.name
class StationCommunication(BaseModel):
name = models.CharField(max_length=45)
description = models.CharField(max_length=256)
color = models.CharField(max_length=7)
class Meta:
verbose_name = "station communication"
verbose_name_plural = "station communications"
def __str__(self):
return self.description
class WMOStationType(BaseModel):
name = models.CharField(max_length=256, unique=True)
description = models.CharField(max_length=256, null=True, blank=True)
def __str__(self):
return self.name
class WMORegion(BaseModel):
name = models.CharField(max_length=256, unique=True)
description = models.CharField(max_length=256, null=True, blank=True)
def __str__(self):
return self.name
class WMOProgram(BaseModel):
name = models.CharField(max_length=256, unique=True)
description = models.CharField(max_length=256, null=True, blank=True)
def __str__(self):
return self.name
class Station(BaseModel):
name = models.CharField(max_length=256)
alias_name = models.CharField(max_length=256, null=True, blank=True)
begin_date = models.DateTimeField(null=True, blank=True)
end_date = models.DateTimeField(null=True, blank=True)
longitude = models.FloatField(validators=[
MinValueValidator(-180.), MaxValueValidator(180.)
])
latitude = models.FloatField(validators=[
MinValueValidator(-90.),
MaxValueValidator(90.)
])
elevation = models.FloatField(null=True, blank=True)
code = models.CharField(max_length=64)
wmo = models.IntegerField(
null=True,
blank=True
)
wigos = models.CharField(
null=True,
max_length=64,
blank=True
)
is_active = models.BooleanField(default=False)
is_automatic = models.BooleanField(default=True)
organization = models.CharField(
max_length=256,
null=True,
blank=True
)
observer = models.CharField(
max_length=256,
null=True,
blank=True
)
watershed = models.CharField(
max_length=256,
null=True,
blank=True
)
z = models.FloatField(
null=True,
blank=True
)
datum = models.CharField(
max_length=256,
null=True,
blank=True
)
zone = models.CharField(
max_length=256,
null=True,
blank=True
)
ground_water_province = models.CharField(
max_length=256,
null=True,
blank=True
)
river_code = models.IntegerField(
null=True,
blank=True
)
river_course = models.CharField(
max_length=64,
null=True,
blank=True
)
catchment_area_station = models.CharField(
max_length=256,
null=True,
blank=True
)
river_origin = models.CharField(
max_length=256,
null=True,
blank=True
)
easting = models.FloatField(
null=True,
blank=True
)
northing = models.FloatField(
null=True,
blank=True
)
river_outlet = models.CharField(
max_length=256,
null=True,
blank=True
)
river_length = models.IntegerField(
null=True,
blank=True
)
local_land_use = models.CharField(
max_length=256,
null=True,
blank=True
)
soil_type = models.CharField(
max_length=64,
null=True,
blank=True
)
site_description = models.CharField(
max_length=256,
null=True,
blank=True
)
land_surface_elevation = models.FloatField(
null=True,
blank=True
)
screen_length = models.FloatField(
null=True,
blank=True
)
top_casing_land_surface = models.FloatField(
null=True,
blank=True
)
depth_midpoint = models.FloatField(
null=True,
blank=True
)
screen_size = models.FloatField(
null=True,
blank=True
)
casing_type = models.CharField(
max_length=256,
null=True,
blank=True
)
casing_diameter = models.FloatField(
null=True,
blank=True
)
existing_gauges = models.CharField(
max_length=256,
null=True,
blank=True
)
flow_direction_at_station = models.CharField(
max_length=256,
null=True,
blank=True
)
flow_direction_above_station = models.CharField(
max_length=256,
null=True,
blank=True
)
flow_direction_below_station = models.CharField(
max_length=256,
null=True,
blank=True
)
bank_full_stage = models.CharField(
max_length=256,
null=True,
blank=True
)
bridge_level = models.CharField(
max_length=256,
null=True,
blank=True
)
access_point = models.CharField(
max_length=256,
null=True,
blank=True
)
temporary_benchmark = models.CharField(
max_length=256,
null=True,
blank=True
)
mean_sea_level = models.CharField(
max_length=256,
null=True,
blank=True
)
data_type = models.CharField(
max_length=256,
null=True,
blank=True
)
frequency_observation = models.CharField(
max_length=256,
null=True,
blank=True
)
historic_events = models.CharField(
max_length=256,
null=True,
blank=True
)
other_information = models.CharField(
max_length=256,
null=True,
blank=True
)
profile = models.ForeignKey(
StationProfile,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
hydrology_station_type = models.CharField(
max_length=64,
null=True,
blank=True
)
is_surface = models.BooleanField(default=True) # options are surface or ground
station_details = models.CharField(
max_length=256,
null=True,
blank=True
)
country = models.CharField(
max_length=256,
null=True,
blank=True
)
region = models.CharField(
max_length=256,
null=True,
blank=True
)
data_source = models.ForeignKey(
DataSource,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
communication_type = models.ForeignKey(
StationCommunication,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
utc_offset_minutes = models.IntegerField(
validators=[
MaxValueValidator(720),
MinValueValidator(-720)
])
alternative_names = models.CharField(
max_length=256,
null=True,
blank=True
)
wmo_station_type = models.ForeignKey(
WMOStationType,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
wmo_region = models.ForeignKey(
WMORegion,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
wmo_program = models.ForeignKey(
WMOProgram,
on_delete=models.DO_NOTHING,
null=True,
blank=True
)
wmo_station_plataform = models.CharField(
max_length=256,
null=True,
blank=True
)
operation_status = models.BooleanField(default=True)
class Meta:
unique_together = ('data_source', 'code')
ordering = ('name',)
def get_absolute_url(self):
"""Returns the url to access a particular instance of MyModelName."""
return reverse('station-detail', args=[str(self.id)])
def __str__(self):
return self.name + ' - ' + self.code
class StationVariable(BaseModel):
station = models.ForeignKey(Station, on_delete=models.DO_NOTHING)
variable = models.ForeignKey(Variable, on_delete=models.DO_NOTHING)
first_measurement = models.DateTimeField(null=True, blank=True)
last_measurement = models.DateTimeField(null=True, blank=True)
last_value = models.FloatField(null=True, blank=True)
height = models.FloatField(null=True, blank=True)
test_range_min = models.FloatField(null=True, blank=True)
test_range_max = models.FloatField(null=True, blank=True)
test_step_min = models.FloatField(null=True, blank=True)
test_step_max = models.FloatField(null=True, blank=True)
test_persistence_variance = models.FloatField(null=True, blank=True)
test_persistence_interval = models.FloatField(null=True, blank=True)
test_spike_value = models.FloatField(null=True, blank=True)
last_data_datetime = models.DateTimeField(null=True, blank=True)
last_data_value = models.FloatField(null=True, blank=True)
last_data_code = models.CharField(max_length=60, null=True, blank=True)
class Meta:
unique_together = ("station", "variable")
ordering = ["station__id", "variable__id", ]
class QualityFlag(BaseModel):
symbol = models.CharField(max_length=8, unique=True)
name = models.CharField(max_length=256, unique=True)
color = ColorField(default='#FF0000', null=True, blank=True)
def __str__(self):
return self.name
def document_directory_path(instance, filename):
# file will be uploaded to MEDIA_ROOT/documents/<station_code>_<timestamp>.<extension>
path_to_file = 'documents/{0}_{1}.{2}'.format(instance.station.code, time.strftime("%Y%m%d_%H%M%S"),
filename.split('.')[-1])
logging.info(f"Saving file {filename} in {path_to_file}")
return path_to_file
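# Example: for a station with code "SJB01", a file "report.pdf" uploaded at
# 2021-06-01 10:30:00 local time is stored as
# 'documents/SJB01_20210601_103000.pdf'.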
class Document(BaseModel):
alias = models.CharField(max_length=256, null=True)
file = models.FileField(upload_to=document_directory_path)
station = models.ForeignKey(Station, on_delete=models.CASCADE)
processed = models.BooleanField(default=False)
decoder = models.ForeignKey(Decoder, on_delete=models.CASCADE, null=True, blank=True)
def __str__(self):
return self.file.name
class DataFile(BaseModel):
ready_at = models.DateTimeField(null=True, blank=True)
ready = models.BooleanField(default=False)
initial_date = models.DateTimeField(null=True, blank=True)
final_date = models.DateTimeField(null=True, blank=True)
source = models.CharField(max_length=30, null=False, blank=False, default="Raw data")
lines = models.IntegerField(null=True, blank=True, default=None)
prepared_by = models.CharField(max_length=256, null=True, blank=True)
interval_in_seconds = models.IntegerField(null=True, blank=True)
def __str__(self):
return 'file ' + str(self.id)
class DataFileStation(BaseModel):
datafile = models.ForeignKey(DataFile, on_delete=models.CASCADE)
station = models.ForeignKey(Station, on_delete=models.CASCADE)
class DataFileVariable(BaseModel):
datafile = models.ForeignKey(DataFile, on_delete=models.CASCADE)
variable = models.ForeignKey(Variable, on_delete=models.CASCADE)
class StationFile(BaseModel):
name = models.CharField(max_length=256, null=True)
file = models.FileField(upload_to='station_files/%Y/%m/%d/')
station = models.ForeignKey(Station, on_delete=models.CASCADE)
def __str__(self):
return self.name
class Format(BaseModel):
name = models.CharField(
max_length=40,
unique=True,
)
description = models.CharField(
max_length=256,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class VariableFormat(BaseModel):
variable = models.ForeignKey(
Variable,
on_delete=models.DO_NOTHING,
)
format = models.ForeignKey(
Format,
on_delete=models.DO_NOTHING,
)
interval = models.ForeignKey(
Interval,
on_delete=models.DO_NOTHING,
)
lookup_key = models.CharField(
max_length=255,
)
class Meta:
ordering = ['variable', 'format', ]
def __str__(self):
return '{} {}'.format(self.variable, self.format)
class PeriodicJobType(BaseModel):
name = models.CharField(
max_length=40,
unique=True,
)
description = models.CharField(
max_length=256,
)
class Meta:
ordering = ['name']
def __str__(self):
return self.name
class PeriodicJob(BaseModel):
periodic_job_type = models.ForeignKey(
PeriodicJobType,
on_delete=models.DO_NOTHING,
)
station = models.ForeignKey(
Station,
on_delete=models.DO_NOTHING,
)
is_running = models.BooleanField(
default=False,
)
last_record = models.IntegerField(
default=0,
)
# Repository: idax4325/veropt
import torch
import gpytorch
import botorch
import math
from copy import deepcopy
import numpy as np
import matplotlib.pyplot as plt
import os
from typing import List, Tuple
class ExactGPModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, likelihood, n_params):
super(ExactGPModel, self).__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ConstantMean()
self.covar_module = gpytorch.kernels.ScaleKernel(
gpytorch.kernels.SpectralMixtureKernel(num_mixtures=10, ard_num_dims=n_params))
# self.covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.MaternKernel(ard_num_dims=param_amount))
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
class LargeFeatureExtractor(torch.nn.Sequential):
def __init__(self, layout, n_params):
super(LargeFeatureExtractor, self).__init__()
self.add_module('linear1', torch.nn.Linear(n_params, layout[0]))
self.add_module('relu1', torch.nn.ReLU())
self.add_module('linear2', torch.nn.Linear(layout[0], layout[1]))
self.add_module('relu2', torch.nn.ReLU())
self.add_module('linear3', torch.nn.Linear(layout[1], layout[2]))
self.add_module('relu3', torch.nn.ReLU())
self.add_module('linear4', torch.nn.Linear(layout[2], layout[3]))
class DKModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y, likelihood, layout, n_params):
super(DKModel, self).__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ConstantMean()
# self.covar_module = gpytorch.kernels.SpectralMixtureKernel(
# num_mixtures=5,
# ard_num_dims=layout[-1],
# batch_shape=torch.Size([1]))
self.covar_module = gpytorch.kernels.MaternKernel(ard_num_dims=layout[-1])
self.feature_extractor = LargeFeatureExtractor(layout, n_params)
def forward(self, x):
# We're first putting our data through a deep net (feature extractor)
# We're also scaling the features so that they're nice values
projected_x = self.feature_extractor(x)
projected_x = projected_x.squeeze(0)
projected_x = projected_x - projected_x.min(0)[0]
projected_x = 2 * (projected_x / projected_x.max(0)[0]) - 1
mean_x = self.mean_module(projected_x)
covar_x = self.covar_module(projected_x.unsqueeze(0))
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
class SMKModelBO(gpytorch.models.ExactGP, botorch.models.gpytorch.GPyTorchModel):
_num_outputs = 1
def __init__(self, train_X, train_Y, n_params, num_mixtures=10):
# squeeze output dim before passing train_Y to ExactGP. Ida: Don't know if it's necessary
super().__init__(train_X, train_Y.squeeze(-1), gpytorch.likelihoods.GaussianLikelihood())
self.mean_module = gpytorch.means.ConstantMean()
# It says in the docs that one shouldn't use a ScaleKernel with the SMK
self.covar_module = gpytorch.kernels.SpectralMixtureKernel(
num_mixtures=num_mixtures,
ard_num_dims=n_params,
batch_shape=torch.Size([1]))
self.to(train_X) # make sure we're on the right device/dtype
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
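# Usage sketch (illustrative, random data): construction only, no training
# loop. Shapes follow the BoTorch convention of n x d inputs and n x 1 outputs.
def _example_smk_model() -> SMKModelBO:
    train_x = torch.rand(20, 3)
    train_y = torch.rand(20, 1)
    return SMKModelBO(train_x, train_y, n_params=3)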
class MaternModelBO(gpytorch.models.ExactGP, botorch.models.gpytorch.GPyTorchModel):
_num_outputs = 1
def __init__(self, train_X, train_Y, n_params):
# squeeze output dim before passing train_Y to ExactGP. Don't know if it's necessary
super().__init__(train_X, train_Y.squeeze(-1), gpytorch.likelihoods.GaussianLikelihood())
self.mean_module = gpytorch.means.ConstantMean()
# Note: Removed the ScaleKernel after I normalised the data myself
self.covar_module = gpytorch.kernels.MaternKernel(
ard_num_dims=n_params,
batch_shape=torch.Size([]))
self.to(train_X) # make sure we're on the right device/dtype
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
class RBFModelBO(gpytorch.models.ExactGP, botorch.models.gpytorch.GPyTorchModel):
_num_outputs = 1
def __init__(self, train_X, train_Y, n_params):
# squeeze output dim before passing train_Y to ExactGP. Don't know if it's necessary
super().__init__(train_X, train_Y.squeeze(-1), gpytorch.likelihoods.GaussianLikelihood())
self.mean_module = gpytorch.means.ConstantMean()
# Note: Removed the ScaleKernel after I normalised the data myself
self.covar_module = gpytorch.kernels.RBFKernel(
ard_num_dims=n_params,
batch_shape=torch.Size([]))
self.to(train_X) # make sure we're on the right device/dtype
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
class MaternKernelPrior(gpytorch.kernels.Kernel):
r"""
Copied from GPytorch matern_kernel and modified to include a prior distribution
"""
has_lengthscale = True
def __init__(self, cdf, nu=2.5, **kwargs):
if nu not in {0.5, 1.5, 2.5}:
raise RuntimeError("nu expected to be 0.5, 1.5, or 2.5")
super(MaternKernelPrior, self).__init__(**kwargs)
self.nu = nu
self.cdf = cdf
def forward(self, x1, x2, diag=False, **params):
if x1.requires_grad or x2.requires_grad or (self.ard_num_dims is not None and self.ard_num_dims > 1) or diag:
# I thiiink I can do this. Maybe. Might wanna check if the lengthscale thing is ok. But it should be. Right?
x1 = self.cdf(x1)
x2 = self.cdf(x2)
mean = x1.reshape(-1, x1.size(-1)).mean(0)[(None,) * (x1.dim() - 1)]
x1_ = (x1 - mean).div(self.lengthscale)
x2_ = (x2 - mean).div(self.lengthscale)
distance = self.covar_dist(x1_, x2_, diag=diag, **params)
exp_component = torch.exp(-math.sqrt(self.nu * 2) * distance)
if self.nu == 0.5:
constant_component = 1
elif self.nu == 1.5:
constant_component = (math.sqrt(3) * distance).add(1)
elif self.nu == 2.5:
constant_component = (math.sqrt(5) * distance).add(1).add(5.0 / 3.0 * distance ** 2)
return constant_component * exp_component
return gpytorch.functions.matern_covariance.MaternCovariance().apply(
x1, x2, self.lengthscale, self.nu, lambda x1, x2: self.covar_dist(x1, x2, **params)
)
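# Usage sketch (assumption about intent): the `cdf` argument warps inputs
# through a prior's cumulative distribution before the Matern distance is
# computed, stretching regions of high prior mass. A standard-normal warp:
def _example_prior_kernel() -> MaternKernelPrior:
    normal_cdf = torch.distributions.Normal(0.0, 1.0).cdf
    return MaternKernelPrior(cdf=normal_cdf, nu=2.5, ard_num_dims=2)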
class MaternPriorModelBO(gpytorch.models.ExactGP, botorch.models.gpytorch.GPyTorchModel):
_num_outputs = 1
def __init__(self, train_X, train_Y, n_params, cdf):
# squeeze output dim before passing train_Y to ExactGP. Don't know if it's necessary
super().__init__(train_X, train_Y.squeeze(-1), gpytorch.likelihoods.GaussianLikelihood())
self.mean_module = gpytorch.means.ConstantMean()
# Note: Removed the ScaleKernel after I normalised the data myself
self.covar_module = MaternKernelPrior(
cdf=cdf,
ard_num_dims=n_params,
batch_shape=torch.Size([]))
self.to(train_X) # make sure we're on the right device/dtype
def forward(self, x):
mean_x = self.mean_module(x)
covar_x = self.covar_module(x)
return gpytorch.distributions.MultivariateNormal(mean_x, covar_x)
class DKModelBO(DKModel, botorch.models.gpytorch.GPyTorchModel):
_num_outputs = 1
def __init__(self, train_X, train_Y, layout, n_params):
# DKModel expects the feature-extractor layout, not an extractor instance
super().__init__(train_X, train_Y.squeeze(-1), gpytorch.likelihoods.GaussianLikelihood(), layout,
n_params)
class BayesOptModel:
def __init__(self, n_params, n_objs, model_class_list: List[type] = None, init_train_its=1000,
train_its=200, lr=0.1, opt_params_list=None, using_priors=False, constraint_dict_list=None):
self.n_params = n_params
self.n_objs = n_objs
self.multi_obj = self.n_objs > 1
self.init_train_its = init_train_its
self.train_its = train_its
self.lr = lr
self.model_list = None
self.model_class_list = None
self.init_model_class_list(model_class_list)
self.model = None # The model is initialised in reset_model
self.likelihood = None
self.mll = None
self.optimiser = None
self.loss_list = []
self.init_opt_params_list(opt_params_list)
self.init_constraint_dict_list(constraint_dict_list)
self.using_priors = using_priors
self.prior_class = None
def init_model_class_list(self, model_class_list):
# TODO: Make these more sensible, consider printing warnings if input is wrong
if model_class_list is None:
self.model_class_list = [MaternModelBO] * self.n_objs
elif not isinstance(model_class_list, list):
self.model_class_list = [model_class_list] * self.n_objs
elif isinstance(model_class_list, list) and len(model_class_list) < self.n_objs:
self.model_class_list = list(model_class_list)  # copy before extending, to avoid mutating the caller's list
while len(self.model_class_list) < self.n_objs:
self.model_class_list.append(MaternModelBO)
else:
self.model_class_list = model_class_list
def init_opt_params_list(self, opt_params_list):
if opt_params_list is None:
self.opt_params_list = []
for model_no in range(self.n_objs):
if "Matern" in self.model_class_list[model_no].__name__ or "RBF" in self.model_class_list[model_no].__name__:
self.opt_params_list.append(["mean_module", "covar_module"])
else:
self.opt_params_list.append(None)
elif not isinstance(opt_params_list, list):
self.opt_params_list = [opt_params_list] * self.n_objs
elif isinstance(opt_params_list, list) and len(opt_params_list) < self.n_objs:
self.opt_params_list = list(opt_params_list)  # copy before extending
for model_no in range(len(self.opt_params_list), self.n_objs):
if "Matern" in self.model_class_list[model_no].__name__ or "RBF" in self.model_class_list[model_no].__name__:
self.opt_params_list.append(["mean_module", "covar_module"])
else:
self.opt_params_list.append(None)
else:
self.opt_params_list = opt_params_list
def init_constraint_dict_list(self, constraint_dict_list):
if constraint_dict_list is None:
self.constraint_dict_list = []
for model_no in range(self.n_objs):
if "Matern" in self.model_class_list[model_no].__name__ or "RBF" in self.model_class_list[model_no].__name__:
self.constraint_dict_list.append({
"covar_module": {
"raw_lengthscale": [0.1, 2.0]}
})
else:
self.constraint_dict_list.append(None)
elif not isinstance(constraint_dict_list, list):
self.constraint_dict_list = [constraint_dict_list] * self.n_objs
elif isinstance(constraint_dict_list, list) and len(constraint_dict_list) < self.n_objs:
self.constraint_dict_list = list(constraint_dict_list)  # copy before extending
for model_no in range(len(self.constraint_dict_list), self.n_objs):
if "Matern" in self.model_class_list[model_no].__name__ or "RBF" in self.model_class_list[model_no].__name__:
self.constraint_dict_list.append({
"covar_module": {
"raw_lengthscale": [0.1, 2.0]}
})
else:
self.constraint_dict_list.append(None)
else:
self.constraint_dict_list = constraint_dict_list
def eval(self, x: torch.Tensor):
self.set_eval()
# if self.multi_obj:
y = self.likelihood(*self.model(*[x] * self.n_objs))
# self.set_train()
if not self.multi_obj:
y = [y]
return y
# else:
# y = self.likelihood(self.model(x))
# self.set_train()
# return y
def set_priors(self, prior_class):
self.prior_class = prior_class
def reset_model(self, x: torch.Tensor, y_split: Tuple):
models = []
if self.using_priors is False:
for model_no in range(self.n_objs):
models.append(self.model_class_list[model_no](x, y_split[model_no], self.n_params))
else:
for model_no in range(self.n_objs):
models.append(self.model_class_list[model_no](x, y_split[model_no], self.n_params,
self.prior_class.prior_cdf))
if self.n_objs == 1:
self.model_list = models
self.model = models[0]
else:
self.model_list = models
self.model = botorch.models.ModelListGP(*models)
def register_constraints(self, module: str, par_name: str, constraints: List, model_no: int):
if self.multi_obj:
self.model.models[model_no].__getattr__(module).register_constraint(
par_name, gpytorch.constraints.Interval(*constraints)
)
else:
self.model.__getattr__(module).register_constraint(
par_name, gpytorch.constraints.Interval(*constraints)
)
def update_constraints(self):
for model_no in range(self.n_objs):
if self.constraint_dict_list[model_no] is not None:
for module in self.constraint_dict_list[model_no]:
for var in self.constraint_dict_list[model_no][module]:
self.register_constraints(module, var, self.constraint_dict_list[model_no][module][var], model_no)
# TODO: Make better. So far this is hardcoded and assumes no noise.
if self.multi_obj:
self.model.models[model_no].likelihood.noise_covar.register_constraint(
'raw_noise', gpytorch.constraints.GreaterThan(10**(-10)))
self.model.models[model_no].likelihood.raw_noise = torch.tensor(-500.0)
else:
self.model.likelihood.noise_covar.register_constraint(
'raw_noise', gpytorch.constraints.GreaterThan(10**(-10)))
self.model.likelihood.raw_noise = torch.tensor(-500.0)
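# Note: under the GreaterThan(1e-10) constraint the raw noise is mapped
# through a softplus-style transform, so raw_noise = -500 pins the effective
# observation noise at (numerically) zero, i.e. a noise-free GP fit.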
def view_model_hyperparameter(self, module: str, par_name: str, model_no: int):
# self.model.__getattr__(module)
constraint_name = par_name + "_constraint"
if self.multi_obj:
constraint = self.model.models[model_no].__getattr__(module).__getattr__(constraint_name)
return constraint.transform(deepcopy(self.model.models[model_no].__getattr__(module).__getattr__(par_name)))
else:
constraint = self.model.__getattr__(module).__getattr__(constraint_name)
return constraint.transform(deepcopy(self.model.__getattr__(module).__getattr__(par_name)))
def view_constrained_hyperparameters(self):
for model_no in range(self.n_objs):
constraint_dict = self.constraint_dict_list[model_no]
for module in constraint_dict:
for par_name in constraint_dict[module]:
par_val = self.view_model_hyperparameter(module, par_name, model_no)
print(f"{par_name} has value(s):")
print(par_val)
def refit_model(self, x: torch.Tensor, y: torch.Tensor):
y_split = y.split(1, dim=2)
self.reset_model(x, y_split)
if self.multi_obj:
self.likelihood = gpytorch.likelihoods.LikelihoodList(
*[self.model.models[model_no].likelihood for model_no in range(self.n_objs)])
else:
self.likelihood = self.model.likelihood
self.model.train()
self.likelihood.train()
if self.multi_obj:
self.mll = gpytorch.mlls.SumMarginalLogLikelihood(self.likelihood, self.model)
else:
self.mll = gpytorch.mlls.ExactMarginalLogLikelihood(self.likelihood, self.model)
self.set_optimiser()
self.update_constraints()
self.train_backwards(self.init_train_its)
def update_model(self, x: torch.Tensor, y: torch.Tensor):
y_split = y.split(1, dim=2)
state_dict = self.model.state_dict()
self.reset_model(x, y_split)
if self.multi_obj:
self.likelihood = gpytorch.likelihoods.LikelihoodList(
*[self.model.models[model_no].likelihood for model_no in range(self.n_objs)])
else:
self.likelihood = self.model.likelihood
self.model.load_state_dict(state_dict)
self.model.train()
self.likelihood.train()
if self.multi_obj:
self.mll = gpytorch.mlls.SumMarginalLogLikelihood(self.likelihood, self.model)
else:
self.mll = gpytorch.mlls.ExactMarginalLogLikelihood(self.likelihood, self.model)
self.set_optimiser()
self.update_constraints()
self.train_backwards()
def set_optimiser(self):
opt_list = []
for model_no in range(self.n_objs):
if self.opt_params_list[model_no] is None:
# for par_name, par in zip(self.model.named_parameters(), self.model.parameters()):
# if f'models.{model_no}' in par_name[0]:
# opt_list.append(par)
if self.multi_obj:
opt_list.append({'params': self.model.models[model_no].parameters()})
else:
opt_list.append({'params': self.model.parameters()})
else:
if self.multi_obj:
for opt_module in self.opt_params_list[model_no]:
opt_list.append({'params': self.model.models[model_no].__getattr__(opt_module).parameters()})
else:
for opt_module in self.opt_params_list[model_no]:
opt_list.append({'params': self.model.__getattr__(opt_module).parameters()})
self.optimiser = torch.optim.Adam(opt_list, lr=self.lr)
def train_backwards(self, its: int = None):
running_on_slurm = "SLURM_JOB_ID" in os.environ
verbose = not running_on_slurm
if its is None:
its = self.train_its
# Repository: andrewgolman/Linguistic-Self-Attention-for-SRL
import tensorflow as tf
import nn_utils
import tensorflow.keras.layers as L
from base_fns import FunctionDispatcher
from tensorflow_addons.text.crf import crf_decode, crf_log_likelihood, viterbi_decode
from opennmt.utils.misc import shape_list
from opennmt.layers.position import SinusoidalPositionEncoder
class OutputLayer(FunctionDispatcher):
def __init__(self, transformer_layer_id, task_map, **params):
super(OutputLayer, self).__init__(task_map, **params)
self.transformer_layer_id = transformer_layer_id
self.hparams = params['hparams']
# if task_map.get('viterbi') or task_map.get('crf'):
if self.static_params['transition_params'] is not None:
self.static_params['transition_params'] = tf.convert_to_tensor(
self.static_params['transition_params'], dtype=tf.float32)
def make_call(self, data, **params):
raise NotImplementedError
def loss(self, targets, output, mask):
raise NotImplementedError
class SoftmaxClassifier(OutputLayer):
def __init__(self, transformer_layer_id, task_vocab_size, **params):
super(SoftmaxClassifier, self).__init__(
transformer_layer_id, task_vocab_size=task_vocab_size, **params
)
self.dropout = L.Dropout(1 - self.hparams.mlp_dropout, noise_shape=[None, 1, self.hparams.sa_hidden_size])
self.dense = L.Dense(task_vocab_size, activation=L.LeakyReLU(alpha=0.1))
self.eval_loss = tf.keras.losses.CategoricalCrossentropy(
from_logits=True,
label_smoothing=self.hparams.label_smoothing,
reduction=tf.keras.losses.Reduction.SUM,
)
def make_call(self, data, **kwargs):
features, mask = data
features = self.dropout(features)
logits = self.dense(features)
predictions = tf.cast(tf.argmax(logits, axis=-1), tf.int32)
output = {
'predictions': predictions,
'scores': logits,
'probabilities': tf.nn.softmax(logits, -1)
}
return output
def loss(self, targets, output, mask):
logits = output['scores']
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets)
n_tokens = tf.reduce_sum(mask)
return tf.reduce_sum(cross_entropy * mask) / n_tokens
class JointSoftmaxClassifier(OutputLayer):
def __init__(self, transformer_layer_id, **params):
super(JointSoftmaxClassifier, self).__init__(transformer_layer_id, **params)
shape0 = self.hparams.sa_hidden_size
shape1 = self.static_params['model_config']['predicate_pred_mlp_size']
shape2 = self.static_params['task_vocab_size']
self.dropout1 = L.Dropout(1 - self.hparams.mlp_dropout, noise_shape=[None, 1, shape0])
self.dense1 = L.Dense(shape1, activation=L.LeakyReLU(alpha=0.1))
self.dropout2 = L.Dropout(1 - self.hparams.mlp_dropout, noise_shape=[None, 1, shape1])
self.dense2 = L.Dense(shape2, activation=L.LeakyReLU(alpha=0.1))
def get_separate_scores_preds_from_joint(self, joint_outputs, joint_num_labels):
joint_maps = self.static_params['joint_maps']
predictions = joint_outputs['predictions']
scores = joint_outputs['scores']
output_shape = tf.shape(predictions)
batch_size = output_shape[0]
batch_seq_len = output_shape[1]
sep_outputs = {}
for map_name, label_comp_map in joint_maps.items():
short_map_name = map_name.split('_to_')[-1]
# marginalize out probabilities for this task
task_num_labels = tf.shape(tf.unique(tf.reshape(label_comp_map, [-1]))[0])[0]
joint_probabilities = tf.nn.softmax(scores)
joint_probabilities_flat = tf.reshape(joint_probabilities, [-1, joint_num_labels])
segment_ids = tf.squeeze(tf.nn.embedding_lookup(label_comp_map, tf.range(joint_num_labels)), -1)
segment_scores = tf.math.unsorted_segment_sum(tf.transpose(joint_probabilities_flat), segment_ids, task_num_labels)
segment_scores = tf.reshape(tf.transpose(segment_scores), [batch_size, batch_seq_len, task_num_labels])
sep_outputs["%s_probabilities" % short_map_name] = segment_scores
# use marginalized probabilities to get predictions
sep_outputs["%s_predictions" % short_map_name] = tf.argmax(segment_scores, -1)
return sep_outputs
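# Worked example of the marginalisation above: suppose three joint labels
# map onto two task labels via label_comp_map = [0, 0, 1]. For a token with
# joint softmax row [0.2, 0.3, 0.5], unsorted_segment_sum over segments
# [0, 0, 1] gives task probabilities [0.5, 0.5]: probability mass is summed
# within each segment before taking the argmax.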
def make_call(self, data, **kwargs):
# features: [BATCH_SIZE, SEQ_LEN, SA_HID]
# mask: [BATCH_SIZE, SEQ_LEN]
features, mask = data
logits = self.dropout1(features)
logits = self.dense1(logits)
logits = self.dropout2(logits)
logits = self.dense2(logits)
predictions = tf.cast(tf.argmax(logits, axis=-1), tf.int32)
output = {
'predictions': predictions, # [BATCH_SIZE, SEQ_LEN]
'scores': logits, # [BATCH_SIZE, SEQ_LEN, VOCAB_SIZE]
'probabilities': tf.nn.softmax(logits, -1) # [BATCH_SIZE, SEQ_LEN, VOCAB_SIZE]
}
n_labels = self.static_params['task_vocab_size']
# now get separate-task scores and predictions for each of the maps we've passed through
separate_output = self.get_separate_scores_preds_from_joint(output, n_labels)
output.update(separate_output)
return output
def loss(self, targets, output, mask):
logits = output['scores']
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets)
n_tokens = tf.reduce_sum(mask)
return tf.reduce_sum(cross_entropy * mask) / n_tokens
class ParseBilinear(OutputLayer):
# todo architecture: unify parse bilinear and conditional bilinear
def __init__(self, transformer_layer_id, **params):
super(ParseBilinear, self).__init__(transformer_layer_id, **params)
self.class_mlp_size = self.static_params['model_config']['class_mlp_size']
self.attn_mlp_size = self.static_params['model_config']['attn_mlp_size']
self.dropout1 = L.Dropout(1 - self.hparams.mlp_dropout, noise_shape=[None, 1, self.hparams.sa_hidden_size])
self.dense1 = L.Dense(2 * (self.class_mlp_size + self.attn_mlp_size),
activation=L.LeakyReLU(alpha=0.1))
self.bilinear = nn_utils.BilinearClassifier(
1, dropout=1 - self.hparams.bilinear_dropout,
left_input_size=self.attn_mlp_size, right_input_size=self.attn_mlp_size
)
def make_call(self, data, **kwargs):
features, mask = data
features = self.dropout1(features)
features = self.dense1(features)
dep_mlp, head_mlp = tf.split(value=features, num_or_size_splits=2, axis=-1)
dep_arc_mlp, dep_rel_mlp = dep_mlp[:, :, :self.attn_mlp_size], dep_mlp[:, :, self.attn_mlp_size:]
head_arc_mlp, head_rel_mlp = head_mlp[:, :, :self.attn_mlp_size], head_mlp[:, :, self.attn_mlp_size:]
# [batch_size x seq_len x seq_len]
arc_logits = self.bilinear([dep_arc_mlp, head_arc_mlp])
arc_logits = tf.squeeze(arc_logits, axis=2)
predictions = tf.argmax(arc_logits, -1)
probabilities = tf.nn.softmax(arc_logits)
output = {
'predictions': predictions, # [BATCH_SIZE, SEQ_LEN] (predictions for arcs)
'probabilities': probabilities, # [BATCH_SIZE, SEQ_LEN, SEQ_LEN]
'scores': arc_logits, # [BATCH_SIZE, SEQ_LEN, SEQ_LEN]
'dep_rel_mlp': dep_rel_mlp, # [BATCH_SIZE, SEQ_LEN, class_mlp_size]
'head_rel_mlp': head_rel_mlp # [BATCH_SIZE, SEQ_LEN, class_mlp_size]
}
return output
def loss(self, targets, output, mask):
logits = output['scores']
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets)
n_tokens = tf.reduce_sum(mask)
return tf.reduce_sum(cross_entropy * mask) / n_tokens
class ConditionalBilinear(OutputLayer):
def __init__(self, transformer_layer_id, **params):
super(ConditionalBilinear, self).__init__(transformer_layer_id, **params)
self.cond_bilinear = nn_utils.ConditionalBilinearClassifier(
self.static_params['task_vocab_size'],
1 - self.hparams.bilinear_dropout,
self.static_params['model_config']['class_mlp_size'],
self.static_params['model_config']['class_mlp_size'],
)
def make_call(self, data, dep_rel_mlp, head_rel_mlp, parse_preds_train, parse_preds_eval, **kwargs):
# data is ignored; this layer uses outputs/labels from previous layers
parse_preds = parse_preds_train if self.teacher_forcing else parse_preds_eval
logits, _ = self.cond_bilinear([dep_rel_mlp, head_rel_mlp, parse_preds])
predictions = tf.argmax(logits, -1)
probabilities = tf.nn.softmax(logits)
output = {
'scores': logits, # [BATCH_SIZE, SEQ_LEN, SEQ_LEN]
'predictions': predictions, # [BATCH_SIZE, SEQ_LEN] (conditional arcs)
'probabilities': probabilities, # [BATCH_SIZE, SEQ_LEN, SEQ_LEN]
}
return output
def loss(self, targets, output, mask):
logits = output['scores']
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=targets)
n_tokens = tf.reduce_sum(mask)
return tf.reduce_sum(cross_entropy * mask) / n_tokens
class SRLBilinear(OutputLayer):
def __init__(self, transformer_layer_id, **params):
super(SRLBilinear, self).__init__(transformer_layer_id, **params)
self.predicate_mlp_size = self.static_params['model_config']['predicate_mlp_size']
self.role_mlp_size = self.static_params['model_config']['role_mlp_size']
self.dropout1 = L.Dropout(1 - self.hparams.mlp_dropout, noise_shape=[None, 1, self.hparams.sa_hidden_size])
self.dense1 = L.Dense(self.predicate_mlp_size + self.role_mlp_size,
activation=L.LeakyReLU(alpha=0.1))
self.dropout2 = L.Dropout(1 - self.hparams.mlp_dropout)
self.bilinear = nn_utils.BilinearClassifier(
self.static_params['task_vocab_size'],
1 - self.hparams.bilinear_dropout,
left_input_size=self.predicate_mlp_size,
right_input_size=self.role_mlp_size,
)
self.eval_loss = tf.keras.losses.CategoricalCrossentropy(
from_logits=True,
label_smoothing=self.hparams.label_smoothing,
reduction=tf.keras.losses.Reduction.SUM,
)
@staticmethod
def bool_mask_where_predicates(predicates_tensor, mask):
# TODO this should really be passed in, not assumed...
predicate_outside_idx = 0
return tf.logical_and(tf.not_equal(predicates_tensor, predicate_outside_idx), tf.cast(mask, tf.bool))
def make_call(self, data, predicate_preds_train, predicate_preds_eval, predicate_targets, **kwargs):
'''
:param features: [BATCH_SIZE, SEQ_LEN, hidden_size]
:param mask: [BATCH_SIZE, SEQ_LEN]
:param predicate_preds: [BATCH_SIZE, SEQ_LEN] Predictions from predicates layer with dims
:param targets: [BATCH_SIZE, SEQ_LEN, batch_num_predicates] SRL labels
:param transition_params: [num_labels x num_labels] transition parameters, if doing Viterbi decoding
'''
self.teacher_forcing = not self.in_eval_mode # !!!
features, mask = data
input_shape = tf.shape(features)
batch_size = input_shape[0]
batch_seq_len = input_shape[1]
# indices of predicates
predicate_preds = predicate_preds_train if self.teacher_forcing else predicate_preds_eval
# [PRED_COUNT, 2] (batch_row, sentence_pos for each predicate)
predicate_gather_indices = tf.where(self.bool_mask_where_predicates(predicate_preds, mask))
# (1) project into predicate, role representations
features = self.dropout1(features)
predicate_role_mlp = self.dense1(features) # [BATCH_SIZE, SEQ_LEN, predicate_mlp_size+role_mlp_size]
predicate_mlp = predicate_role_mlp[:, :, :self.predicate_mlp_size] # [BATCH_SIZE, SEQ_LEN, predicate_mlp_size]
role_mlp = predicate_role_mlp[:, :, self.predicate_mlp_size:] # [BATCH_SIZE, SEQ_LEN, role_mlp_size]
# (2) feed through bilinear to obtain scores
# gather just the predicates
# gathered_predicates: num_predicates_in_batch x 1 x predicate_mlp_size
# role mlp: batch x seq_len x role_mlp_size
# gathered roles: need a (batch_seq_len x role_mlp_size) role representation for each predicate,
# i.e. a (num_predicates_in_batch x batch_seq_len x role_mlp_size) tensor
gathered_predicates = tf.expand_dims(tf.gather_nd(predicate_mlp, predicate_gather_indices), 1) # [PRED_COUNT, 1, role_mlp_size]
# AG duplicate dimension
tiled_roles = tf.reshape(tf.tile(role_mlp, [1, batch_seq_len, 1]),
[batch_size, batch_seq_len, batch_seq_len, self.role_mlp_size])
gathered_roles = tf.gather_nd(tiled_roles, predicate_gather_indices) # [PRED_COUNT, SEQ_LEN, HID]
# now multiply them together to get (num_predicates_in_batch x batch_seq_len x num_srl_classes) tensor of scores
srl_logits = self.bilinear([gathered_predicates, gathered_roles]) # [PRED_COUNT, bilin_output_size, SEQ_LEN]
logits_shape = shape_list(srl_logits)
srl_logits = tf.reshape(srl_logits, [-1, logits_shape[2], logits_shape[3]])
srl_logits_transposed = tf.transpose(srl_logits, [0, 2, 1]) # [PRED_COUNT, SEQ_LEN, bilin_output_size]
# num_predicates_in_batch x seq_len
predictions = tf.cast(tf.argmax(srl_logits_transposed, axis=-1), tf.int32) # [PRED_COUNT, SEQ_LEN] (role for each word for each predicate)
# compute loss only on words given in srl_mask
if 'srl_mask' in kwargs:
mask *= kwargs['srl_mask']
# need to repeat each of these once for each target in the sentence
mask_tiled = tf.reshape(tf.tile(mask, [1, batch_seq_len]), [batch_size, batch_seq_len, batch_seq_len])
gather_mask = tf.gather_nd(mask_tiled, predicate_gather_indices)
seq_lens = tf.cast(tf.reduce_sum(gather_mask, 1), tf.int32) # [BATCH_SIZE]
transition_params = self.static_params["transition_params"]
if transition_params is not None and self.in_eval_mode:
num_predicates = shape_list(srl_logits_transposed)[0]
if tf.not_equal(num_predicates, 0):
if 'viterbi' in self.static_params:
unstacked_logits = tf.unstack(srl_logits_transposed, axis=0)
unstacked_predictions = [viterbi_decode(l, transition_params)[0] for l in unstacked_logits]
predictions = tf.stack(unstacked_predictions)
else:
predictions, _ = crf_decode(srl_logits_transposed, transition_params, seq_lens)
output = {
'predictions': predictions,
'scores': srl_logits_transposed, # [PRED_COUNT, SEQ_LEN, bilin_output_size]
'probabilities': tf.nn.softmax(srl_logits_transposed, -1), # [PRED_COUNT, SEQ_LEN, bilin_output_size]
'predicate_preds': predicate_preds,
'predicate_targets': predicate_targets,
'gather_mask': gather_mask,
}
return output
def loss(self, targets, output, mask):
num_labels = self.static_params['task_vocab_size']
transition_params = self.static_params['transition_params']
srl_logits_transposed = output['scores'] # [PRED_COUNT, SEQ_LEN, bilin_output_size]
predicate_preds = output['predicate_preds'] # [BATCH_SIZE, SEQ_LEN] (0/1)
gather_mask = output['gather_mask'] # [PRED_COUNT, SEQ_LEN]
predicate_targets = output['predicate_targets'] # [BATCH_SIZE, SEQ_LEN] (0/1)
seq_lens = tf.cast(tf.reduce_sum(gather_mask, 1), tf.int32) # [PRED_COUNT]
srl_targets = tf.transpose(targets, [0, 2, 1]) # [BATCH_SIZE, max_pred_in_sample, SEQ_LEN]
if not self.teacher_forcing: # compute loss only on correctly predicted predicates
correct_predicate_preds = tf.math.multiply(predicate_targets, tf.cast(predicate_preds, tf.int32))
# correct_predicate_indices = tf.where(correct_predicate_preds)
loss_calculation_mask = tf.gather_nd(predicate_targets, tf.where(predicate_preds)) # [PRED_COUNT]
loss_calculation_ind = tf.squeeze(tf.where(loss_calculation_mask)) # [COR_PRED_COUNT]
srl_logits_correct = tf.gather(srl_logits_transposed, loss_calculation_ind) # [COR_PRED_COUNT,
seq_lens_correct = tf.gather(seq_lens, loss_calculation_ind)
gather_mask_correct = tf.gather(gather_mask, loss_calculation_ind)
else:
correct_predicate_preds = predicate_preds
srl_logits_correct = srl_logits_transposed
seq_lens_correct = seq_lens
gather_mask_correct = gather_mask
correct_predicate_counts = tf.reduce_sum(tf.cast(
self.bool_mask_where_predicates(correct_predicate_preds, mask), tf.int32), -1)
srl_targets_pred_indices = tf.where(tf.sequence_mask(tf.reshape(correct_predicate_counts, [-1])))
srl_targets_predicted_predicates = tf.gather_nd(srl_targets, srl_targets_pred_indices)
if transition_params is not None:
log_likelihood, new_transition_params = crf_log_likelihood(
srl_logits_correct,
srl_targets_predicted_predicates,
seq_lens_correct,
transition_params
)
loss = tf.reduce_mean(-log_likelihood)
if not self.in_eval_mode:
self.static_params['transition_params'] = new_transition_params
else:
num_predicates = shape_list(srl_logits_transposed)[0]
if tf.equal(num_predicates, 0):
return 1e5
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=tf.reshape(srl_logits_correct, [-1, num_labels]),
labels=tf.reshape(srl_targets_predicted_predicates, [-1])
)
gather_mask_correct = tf.cast(gather_mask_correct, tf.float32)
n_tokens = tf.reduce_sum(gather_mask_correct)
return tf.reduce_sum(cross_entropy * tf.reshape(gather_mask_correct, [-1])) / n_tokens
# -*- coding: utf-8 -*-
__module_name__ = "EasyXdcc"
__module_version__ = "1.3"
__module_description__ = "Xdcc Queues"
__module_author__ = "<NAME> <<EMAIL>>"
__module_contributor__ = "Ultrabenosaurus <https://github.com/Ultrabenosaurus/EasyXdcc>"
import xchat, os, time, pprint, platform
class t_bot:
def __init__(self, name, serv, chan):
self.name = name
self.chan = chan
self.serv = serv
self.packs = []
def match (self, name, chan, serv):
return (self.name == name) & (self.chan == chan) & (self.serv == serv)
def __eq__ (self, bot):
if (isinstance(bot, t_bot)) :
return (self.name == bot.name) & (self.chan == bot.chan) & (self.serv == bot.serv)
else :
return False
def add_pack(self, num_pack):
if (type(num_pack) == int):
if num_pack not in self.packs:
self.packs.append(num_pack)
self.packs.sort(reverse=True)
def del_pack(self, num_pack):
if (type(num_pack) == int):
if num_pack in self.packs:
del self.packs[self.packs.index(num_pack)]
def pop(self):
return self.packs.pop()
def __len__ (self):
return len(self.packs)
def isActive(self):
dcc_list = xchat.get_list("dcc")
if dcc_list:
for i in dcc_list:
if i.nick == self.name:
return i.status in (0, 1, 4)
return False
def __repr__(self):
bot_str = "Bot : "+self.name+" [ "+self.serv+", "+self.chan+"]"+"\n"
for pack in reversed(self.packs):
bot_str += " #"+str(pack)+"\n"
return bot_str
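# Usage sketch with hypothetical values (not part of the original script): packs are
# kept sorted in reverse so pop() always hands out the lowest pack number first.
#   b = t_bot('SomeBot', 'irc.example.net', '#chan')
#   b.add_pack(7); b.add_pack(3)   # stored as [7, 3]
#   b.pop()                        # -> 3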
class bot_queue:
def __init__(self):
self.bots = []
def search(self, name, chan, serv):
for i in self.bots:
if (i.match(name, chan, serv)):
return i
return None
def add(self, new_bot):
if isinstance(new_bot, t_bot):
for i in self.bots:
if (i == new_bot):
return
self.bots.append(new_bot)
def del_bot (self, bot):
if isinstance(bot, t_bot):
if bot in self.bots:
del self.bots[self.bots.index(bot)]
def __repr__ (self):
queue_str = "\n"
queue_str += "*****************************\n"
queue_str += "* Queue EasyXdcc *\n"
queue_str += "*****************************\n"
queue_str += "\n"
if len(self.bots) == 0:
queue_str += "No pack(s) queued\n"
queue_str += "\n"
else:
for bot in self.bots:
queue_str += repr(bot)
queue_str += "\n"
return queue_str
def save(self, file_name):
if (type(file_name) == str):
try:
file = open(file_name,'wt')
try:
for bot in self.bots:
file.write(getattr(bot,"name")+"\n")
file.write(getattr(bot,"serv")+"\n")
file.write(getattr(bot,"chan")+"\n")
for pack in getattr(bot,"packs"):
file.write(str(pack)+"\n")
file.write("\n")
finally:
file.close()
except IOError:
pass
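# File format written by save() and expected by load(): per bot, three lines
# (name, server, channel) followed by one pack number per line and a terminating
# blank line. Illustrative example (names are made up):
#   SomeBot
#   irc.example.net
#   #somechannel
#   12
#   13
#   <blank line>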
def load(self,file_name):
strip_str = "\n\r"
if (type(file_name) == str):
try:
file = open(file_name,'rt')
try:
etat=0
for buffer in file.readlines():
if etat==0:
name = buffer.strip(strip_str)
etat = 1
elif etat==1:
serv = buffer.strip(strip_str)
etat = 2
elif etat==2:
chan = buffer.strip(strip_str)
etat = 3
elif etat==3:
bot = t_bot(name,serv,chan)
self.add(bot)
pack = buffer.strip(strip_str)
if pack == "":
etat=0
else:
bot.add_pack(int(pack))
etat=4
else:
pack = buffer.strip(strip_str)
if pack == "":
etat=0
else:
bot.add_pack(int(pack))
finally:
file.close()
except IOError:
pass
def delqueue(self,file_name):
if (type(file_name) == str):
try:
os.remove(file_name)
except OSError:
pass
def purgequeue(self, file_name):
self.bots = []
self.delqueue(file_name)
def connect(self):
servchan=[]
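# servchan is built as a flat list alternating a server name with the list of its
# channels, e.g. ['irc.example.net', ['#chanA', '#chanB'], ...] (illustrative values)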
for bot in self.bots:
if getattr(bot, "serv") in servchan:
servchan[servchan.index(getattr(bot, "serv")) + 1].append(getattr(bot, "chan"))
else:
servchan.append(getattr(bot,"serv"))
servchan.append([getattr(bot,"chan")])
for i in range(0,len(servchan),2):
servs = ""
for serv in servchan[i+1]:
servs=servs+serv+","
servs = servs.strip(",")
xchat.command("servchan "+servchan[i]+" 6667 "+servs)
def get_bot_current_chan(bot_name):
global queue
if not isinstance(bot_name, str):
return None
serv = xchat.get_info("host")
chan = xchat.get_info("channel")
if serv is None or chan is None:
print("Not Connected!")
return None
bot = queue.search(bot_name, chan, serv)
if bot is None:
bot = t_bot(bot_name, serv, chan)
queue.add(bot)
return bot
def search_bot_current_chan(bot_name):
global queue
if not isinstance(bot_name, str):
return None
serv = xchat.get_info("host")
chan = xchat.get_info("channel")
if serv is None or chan is None:
print("Not Connected!")
return None
return queue.search(bot_name, chan, serv)
def help():
print("")
print("*****************************")
print("* EasyXdcc Commands *")
print("*****************************")
print("")
print("Queue a pack :")
print("/XDCC ADD [bot_name] [n°_pack]")
print("")
print("Queue a pack list :")
print("/XDCC ADDL [bot_name] [n°_pack_beg] [n°_pack_end]")
print("")
print("Queue non-sequential pack list :")
print("/XDCC ADDM [bot_name] [n°_pack_1] [n°_pack_2] [...]")
print("")
print("See pack queue :")
print("/XDCC QUEUE")
print("")
print("See pack queue for a bot :")
print("/XDCC QUEUE [bot_name]")
print("")
print("Withdraw a pack from queue :")
print("/XDCC RMP [bot_name] [n°pack]")
print("")
print("Withdraw a pack list from queue :")
print("/XDCC RMPL [bot_name] [n°pack_beg] [N°pack_end]")
print("")
print("Withdraw a non-sequential pack list from queue :")
print("/XDCC RMPM [bot_name] [n°_pack_1] [n°_pack_2] [...]")
print("")
print("Withdraw a bot from queue :")
print("/XDCC RMBOT [bot_name]")
print("")
print("Stop EasyXdcc :")
print("/XDCC STOP")
print("")
print("Start EasyXdcc :")
print("/XDCC START")
print("")
print("Show auto-start status :")
print("/XDCC AUTO")
print("")
print("Toggle auto-start :")
print("/XDCC AUTO [ON|OFF]")
print("")
print("Save Queue :")
print("/XDCC SAVE")
print("")
print("Load Queue :")
print("/XDCC LOAD")
print("")
print("Delete saved Queue file :")
print("/XDCC PURGE")
print("")
return xchat.EAT_ALL
def idx_EasyXdcc(word, word_eol, userdata):
argc = len(word)
if argc == 2:
if word[1] == "start":
return start()
elif word[1] == "stop":
return stop()
elif word[1] == "save":
return save()
elif word[1] == "load":
return load()
elif word[1] == "queue":
return seequeue()
elif word[1] == "help":
return help()
elif word[1] == "purge":
return purgequeue()
elif word[1] == "auto":
return show_auto()
elif argc == 3:
if word[1] == "rmbot":
return rmbot(word[2])
elif word[1] == "queue":
return seebotqueue(word[2])
elif word[1] == "auto":
return toggle_auto(word[2])
elif argc == 4 :
if word[3].isdigit():
if word[1] == "add":
return add(word[2], int(word[3]))
elif word[1] == "rmp":
return rmp(word[2], int(word[3]))
elif argc >= 5:
if word[3].isdigit() and word[4].isdigit():
if word[1] == "addl":
return addl(word[2], int(word[3]), int(word[4]))
elif word[1] == "rmpl":
return rmpl(word[2], int(word[3]), int(word[4]))
elif word[1] == "addm":
return addm(word[2], word[3:])
elif word[1] == "rmpm":
return rmpm(word[2], word[3:])
return xchat.EAT_ALL
def seequeue():
global queue
print(queue)
return xchat.EAT_ALL
def seebotqueue(bot_name):
global queue
if not isinstance(bot_name, str):
print("/XDCC QUEUE [BOT_NAME]")
return xchat.EAT_ALL
else:
bot = search_bot_current_chan(bot_name)
if bot is not None:
print(bot)
return xchat.EAT_ALL
def show_auto():
if os.path.exists(sav_dir + "autostart"):
print("EasyXdcc : auto-start is currently ON")
else:
print("EasyXdcc : auto-start is currently OFF")
return xchat.EAT_ALL
def toggle_auto(switch):
if 'on' == switch:
if not os.path.exists(sav_dir + "autostart"):
file = open(sav_dir + "autostart", 'wt')
file.close()
xchat.command ("MENU -t1 ADD \"EasyXdcc/Auto-Start\" \"xdcc auto on\" \"xdcc auto off\"")
print("EasyXdcc : auto-start enabled")
if 'off' == switch:
if os.path.exists(sav_dir + "autostart"):
os.remove(sav_dir + "autostart")
xchat.command ("MENU -t0 ADD \"EasyXdcc/Auto-Start\" \"xdcc auto on\" \"xdcc auto off\"")
print("EasyXdcc : auto-start disabled")
return xchat.EAT_ALL
def add(bot_name, num_pack):
global queue
if not isinstance(bot_name, str) or not isinstance(num_pack, int):
print("/XDCC ADD BOT_NAME NUM_PACK")
else:
bot = get_bot_current_chan(bot_name)
if bot is not None:
bot.add_pack(num_pack)
print("EasyXdcc : Pack number #"+str(num_pack)+" add to "+bot_name)
return xchat.EAT_ALL
def addl(bot_name, pbeg, pend):
global queue
if not isinstance(bot_name, str) or not isinstance(pbeg, int) or not isinstance(pend, int):
print("/XDCC ADDL BOT_NAME PACK_BEG PACK_END")
else:
bot = get_bot_current_chan(bot_name)
if bot is not None:
for pack in range(pbeg, pend+1):
bot.add_pack(pack)
print("EasyXdcc : Packs number #"+str(pbeg)+" to #"+str(pend)+" add to "+bot_name)
return xchat.EAT_ALL
def addm(bot_name, *pack_nums):
global queue
pack_nums = pack_nums[0]
if not isinstance(bot_name, str) or not all(str(p).isdigit() for p in pack_nums):
print("/XDCC ADDM BOT_NAME PACK_NUM_1 PACK_NUM_2 ...")
else:
bot = get_bot_current_chan(bot_name)
if bot is not None:
for pack in pack_nums:
bot.add_pack(int(pack))
print("EasyXdcc : add "+str(len(pack_nums))+" Packs to "+bot_name)
return xchat.EAT_ALL
def rmp(bot_name,num_pack):
if not isinstance(bot_name, str) or not isinstance(num_pack, int):
print("/XDCC RMP BOT_NAME NUM_PACK")
else:
bot = search_bot_current_chan(bot_name)
if bot is not None:
bot.del_pack(num_pack)
print("EasyXdcc : Pack number #"+str(num_pack)+" remove from "+bot_name)
return xchat.EAT_ALL
def rmpl(bot_name,pbeg,pend):
global queue
if not isinstance(bot_name, str) or not isinstance(pbeg, int) or not isinstance(pend, int):
print("/XDCC RMPL BOT_NAME PACK_BEG PACK_END")
else:
bot = search_bot_current_chan(bot_name)
if bot is not None:
for pack in range(pbeg,pend + 1):
bot.del_pack(pack)
print("EasyXdcc : Pack number #"+str(pbeg)+" to #"+str(pend)+" remove from "+bot_name)
return xchat.EAT_ALL
def rmpm(bot_name, *pack_nums):
global queue
pack_nums = pack_nums[0]
if not isinstance(bot_name, str) or not all(str(p).isdigit() for p in pack_nums):
print("/XDCC RMPM BOT_NAME PACK_NUM_1 PACK_NUM_2 ...")
else:
bot = search_bot_current_chan(bot_name)
if bot is not None:
for pack in pack_nums:
bot.del_pack(int(pack))
print("EasyXdcc : remove "+str(len(pack_nums))+" Packs from "+bot_name)
return xchat.EAT_ALL
def rmbot(bot_name):
global queue
if not isinstance(bot_name, str):
print("/XDCC RMBOT BOT_NAME")
else:
bot = search_bot_current_chan(bot_name)
if bot is not None:
queue.del_bot(bot)
print("EasyXdcc : "+bot_name+" removed from queue")
return xchat.EAT_ALL
def save():
global queue,sav_file
queue.save(sav_file)
print("Queue(s) state saved")
return xchat.EAT_ALL
def load():
global queue,sav_file
queue.load(sav_file)
# queue.connect()
print("Queue(s) state loaded")
return xchat.EAT_ALL
def delqueue():
global queue,sav_file
queue.delqueue(sav_file)
print("Queue file deleted")
return xchat.EAT_ALL
def purgequeue():
global queue,sav_file
queue.purgequeue(sav_file)
print("Queue file deleted")
return xchat.EAT_ALL
def start():
global my_hook
if my_hook is None:
my_hook = xchat.hook_timer(10000, launch_dl)
print("EasyXdcc started")
launch_dl(None)
return xchat.EAT_ALL
def stop():
global my_hook
if my_hook is not None:
xchat.unhook(my_hook)
my_hook = None
print("EasyXdcc stoped")
return xchat.EAT_ALL
def launch_dl(userdata):
global queue, my_hook
if None == xchat.get_info("server"):
xchat.unhook(my_hook)
my_hook = xchat.hook_timer(10000,server_check)
else:
for bot in getattr(queue,
403
_strings['wind'] = 404
_strings['dew point'] = 405
_strings['humidity'] = 406
_strings['defaults'] = 409
_strings['accessing weather service'] = 410
_strings['getting weather for:'] = 411
_strings['unable to get weather data'] = 412
_strings['manual'] = 413
_strings['no review for this album'] = 414
_strings['downloading thumbnail...'] = 415
_strings['not available'] = 416
_strings['view: big icons'] = 417
_strings['low'] = 418
_strings['high'] = 419
_strings['keep audio device alive'] = 421
_strings['delete cd information'] = 423
_strings['select'] = 424
_strings['no album information found'] = 425
_strings['no cd information found'] = 426
_strings['disc'] = 427
_strings['please insert the following disc:'] = 429
_strings['sort by: dvd#'] = 430
_strings['no cache'] = 431
_strings['remove movie from library'] = 432
_strings['from %s at %i %s'] = 434
_strings['no optical disc drive detected'] = 435
_strings['you need an optical disc drive to play this video'] = 436
_strings['removable disk'] = 437
_strings['opening file'] = 438
_strings['cache'] = 439
_strings['hard disk'] = 440
_strings['udf'] = 441
_strings['local network'] = 442
_strings['internet'] = 443
_strings['video'] = 444
_strings['audio'] = 445
_strings['dvd'] = 446
_strings['autorun media'] = 447
_strings['dolby digital plus (e-ac3) capable receiver'] = 448
_strings['enabled'] = 449
_strings['columns'] = 450
_strings['row 1 address'] = 451
_strings['row 2 address'] = 452
_strings['row 3 address'] = 453
_strings['row 4 address'] = 454
_strings['rows'] = 455
_strings['mode'] = 456
_strings['switch view'] = 457
_strings['limit sampling rate (khz)'] = 458
_strings['subs'] = 459
_strings['audio stream'] = 460
_strings['[active]'] = 461
_strings['subtitle'] = 462
_strings['backlight'] = 463
_strings['brightness'] = 464
_strings['contrast'] = 465
_strings['gamma'] = 466
_strings['type'] = 467
_strings['move the bar to change the osd position'] = 468
_strings['osd position'] = 469
_strings['credits'] = 470
_strings['off'] = 474
_strings['music only'] = 475
_strings['music & video'] = 476
_strings['unable to load playlist'] = 477
_strings['osd'] = 478
_strings['skin & language'] = 479
_strings['appearance'] = 480
_strings['audio options'] = 481
_strings['about kodi'] = 482
_strings['delete album'] = 485
_strings['repeat'] = 486
_strings['repeat one'] = 487
_strings['repeat folder'] = 488
_strings['- use big icons'] = 491
_strings['resize vobsubs'] = 492
_strings['overall audio headroom'] = 494
_strings['calibration'] = 496
_strings['show file extensions'] = 497
_strings['sort by: type'] = 498
_strings['unable to connect to online lookup service'] = 499
_strings['downloading album information failed'] = 500
_strings['looking for album names...'] = 501
_strings['open'] = 502
_strings['busy'] = 503
_strings['empty'] = 504
_strings['sort by: usage'] = 507
_strings['enable visualisations'] = 510
_strings['enable video mode switching'] = 511
_strings['startup window'] = 512
_strings['home window'] = 513
_strings['manual settings'] = 514
_strings['genre'] = 515
_strings['recently played albums'] = 517
_strings['launch'] = 518
_strings['launch in...'] = 519
_strings['compilations'] = 521
_strings['remove source'] = 522
_strings['switch media'] = 523
_strings['select playlist'] = 524
_strings['new playlist...'] = 525
_strings['add to playlist'] = 526
_strings['manually add to library'] = 527
_strings['enter title'] = 528
_strings['error: duplicate title'] = 529
_strings['select genre'] = 530
_strings['new genre'] = 531
_strings['manual addition'] = 532
_strings['enter genre'] = 533
_strings['view: %s'] = 534
_strings['list'] = 535
_strings['icons'] = 536
_strings['big list'] = 537
_strings['big icons'] = 538
_strings['wide'] = 539
_strings['big wide'] = 540
_strings['album icons'] = 541
_strings['dvd icons'] = 542
_strings['dvd'] = 543
_strings['media info'] = 544
_strings['audio output device'] = 545
_strings['passthrough output device'] = 546
_strings['no biography for this artist'] = 547
_strings['downmix multichannel audio to stereo'] = 548
_strings['number'] = 549
_strings['sort by: %s'] = 550
_strings['name'] = 551
_strings['date'] = 552
_strings['size'] = 553
_strings['track'] = 554
_strings['time'] = 555
_strings['title'] = 556
_strings['artist'] = 557
_strings['album'] = 558
_strings['playlist'] = 559
_strings['id'] = 560
_strings['file'] = 561
_strings['year'] = 562
_strings['rating'] = 563
_strings['type'] = 564
_strings['usage'] = 565
_strings['album artist'] = 566
_strings['play count'] = 567
_strings['last played'] = 568
_strings['comment'] = 569
_strings['date added'] = 570
_strings['default'] = 571
_strings['studio'] = 572
_strings['path'] = 573
_strings['country'] = 574
_strings['in progress'] = 575
_strings['times played'] = 576
_strings['sort direction'] = 580
_strings['sort method'] = 581
_strings['view mode'] = 582
_strings['remember views for different folders'] = 583
_strings['ascending'] = 584
_strings['descending'] = 585
_strings['edit playlist'] = 586
_strings['filter'] = 587
_strings['cancel party mode'] = 588
_strings['party mode'] = 589
_strings['random'] = 590
_strings['off'] = 591
_strings['one'] = 592
_strings['all'] = 593
_strings['off'] = 594
_strings['repeat: off'] = 595
_strings['repeat: one'] = 596
_strings['repeat: all'] = 597
_strings['rip audio cd'] = 600
_strings['medium'] = 601
_strings['standard'] = 602
_strings['extreme'] = 603
_strings['constant bitrate'] = 604
_strings['ripping...'] = 605
_strings['to:'] = 607
_strings['rip audio track'] = 610
_strings['enter number'] = 611
_strings['bits/sample'] = 612
_strings['sample rate'] = 613
_strings['virtual folder'] = 614
_strings['audio cds'] = 620
_strings['encoder'] = 621
_strings['quality'] = 622
_strings['bitrate'] = 623
_strings['include track number'] = 624
_strings['all songs of'] = 625
_strings['in progress tv shows'] = 626
_strings['view mode'] = 629
_strings['normal'] = 630
_strings['zoom'] = 631
_strings['stretch 4:3'] = 632
_strings['stretch 16:9'] = 634
_strings['custom'] = 636
_strings['replaygain'] = 637
_strings['use track levels'] = 639
_strings['use album levels'] = 640
_strings['need to unpack a big file. continue?'] = 645
_strings['remove from library'] = 646
_strings['export video library'] = 647
_strings['import video library'] = 648
_strings['importing'] = 649
_strings['exporting'] = 650
_strings['browse for library'] = 651
_strings['years'] = 652
_strings['update library'] = 653
_strings['browse for executable'] = 655
_strings['browse for playlist'] = 656
_strings['browse for folder'] = 657
_strings['song information'] = 658
_strings['non-linear stretch'] = 659
_strings['volume amplification'] = 660
_strings['choose export folder'] = 661
_strings['this file is no longer available.'] = 662
_strings['would you like to remove it from the library?'] = 663
_strings['compression level'] = 665
_strings['enable component-specific logging'] = 666
_strings['specify component-specific logging...'] = 668
_strings['cleaning up library'] = 700
_strings['removing old songs from the library'] = 701
_strings['this path has been scanned before'] = 702
_strings['network'] = 705
_strings['server'] = 706
_strings['internet protocol (ip)'] = 711
_strings['invalid port specified. value must be between 1 and 65535.'] = 712
_strings['http proxy'] = 713
_strings['assignment'] = 715
_strings['automatic (dhcp)'] = 716
_strings['ip address'] = 719
_strings['netmask'] = 720
_strings['default gateway'] = 721
_strings['dns server'] = 722
_strings['save & restart'] = 723
_strings['invalid address specified. value must be aaa.bbb.ccc.ddd'] = 724
_strings['with numbers between 0 and 255.'] = 725
_strings['changes not saved. continue without saving?'] = 726
_strings['web server'] = 727
_strings['ftp server'] = 728
_strings['port'] = 730
_strings['save & apply'] = 732
_strings['password'] = <PASSWORD>
_strings['no pass'] = 734
_strings['character set'] = 735
_strings['style'] = 736
_strings['colour'] = 737
_strings['normal'] = 738
_strings['bold'] = 739
_strings['italics'] = 740
_strings['bold italics'] = 741
_strings['white'] = 742
_strings['yellow'] = 743
_strings['files'] = 744
_strings['error loading image'] = 747
_strings['edit path'] = 748
_strings['mirror image'] = 749
_strings['are you sure?'] = 750
_strings['removing source'] = 751
_strings['add program link'] = 754
_strings['edit program path'] = 755
_strings['edit program name'] = 756
_strings['edit path depth'] = 757
_strings['view: big list'] = 759
_strings['yellow'] = 760
_strings['white'] = 761
_strings['blue'] = 762
_strings['bright green'] = 763
_strings['yellow green'] = 764
_strings['cyan'] = 765
_strings['light grey'] = 766
_strings['grey'] = 767
_strings['error %i: share not available'] = 770
_strings['seeking'] = 773
_strings['slideshow folder'] = 774
_strings['network interface'] = 775
_strings['wireless network name (essid)'] = 776
_strings['wireless password'] = <PASSWORD>
_strings['wireless security'] = 778
_strings['save and apply network interface settings'] = 779
_strings['no encryption'] = 780
_strings['wep'] = 781
_strings['wpa'] = 782
_strings['wpa2'] = 783
_strings['applying network interface settings. please wait.'] = 784
_strings['network interface restarted successfully.'] = 785
_strings['network interface did not start successfully.'] = 786
_strings['interface disabled'] = 787
_strings['network interface disabled successfully.'] = 788
_strings['wireless network name (essid)'] = 789
_strings['remote control'] = 790
_strings['port'] = 792
_strings['port range'] = 793
_strings['initial repeat delay (ms)'] = 795
_strings['continuous repeat delay (ms)'] = 796
_strings['maximum number of clients'] = 797
_strings['internet access'] = 798
_strings['%s of %s available'] = 802
_strings['invalid port number entered'] = 850
_strings['valid port range is 1-65535'] = 851
_strings['valid port range is 1024-65535'] = 852
_strings['preview'] = 1000
_strings['unable to connect'] = 1001
_strings['ip address'] = 1006
_strings['add network location'] = 1007
_strings['protocol'] = 1008
_strings['server address'] = 1009
_strings['server name'] = 1010
_strings['remote path'] = 1011
_strings['shared folder'] = 1012
_strings['port'] = 1013
_strings['username'] = 1014
_strings['browse for network server'] = 1015
_strings['enter the network address of the server'] = 1016
_strings['enter the path on the server'] = 1017
_strings['enter the port number'] = 1018
_strings['enter the username'] = 1019
_strings['enter the paths or browse for the media locations.'] = 1021
_strings['browse for new share'] = 1023
_strings['browse'] = 1024
_strings['add source'] = 1026
_strings['edit source'] = 1027
_strings['enter the new label'] = 1029
_strings['browse for image'] = 1030
_strings['browse for image folder'] = 1031
_strings['add network location...'] = 1032
_strings['browse for file'] = 1033
_strings['submenu'] = 1034
_strings['enable submenu buttons'] = 1035
_strings['favourites'] = 1036
_strings['loading directory'] = 1040
_strings['retrieved %i items'] = 1041
_strings['retrieved %i of %i items'] = 1042
_strings['set plug-in thumb'] = 1044
_strings['access points'] = 1046
_strings['other...'] = 1047
_strings['username'] = 1048
_strings['script settings'] = 1049
_strings['singles'] = 1050
_strings['enter web address'] = 1051
_strings['proxy type'] = 1180
_strings['http'] = 1181
_strings['socks4'] = 1182
_strings['socks4a'] = 1183
_strings['socks5'] = 1184
_strings['smb client'] = 1200
_strings['workgroup'] = 1202
_strings['default username'] = 1203
_strings['default password'] = <PASSWORD>
_strings['wins server'] = 1207
_strings['mount smb shares'] = 1208
_strings['remove'] = 1210
_strings['music'] = 1211
_strings['video'] = 1212
_strings['pictures'] = 1213
_strings['files'] = 1214
_strings['music & video '] = 1215
_strings['music & pictures'] = 1216
_strings['music & files'] = 1217
_strings['video & pictures'] = 1218
_strings['video & files'] = 1219
_strings['pictures & files'] = 1220
_strings['music & video & pictures'] = 1221
_strings['music & video & pictures & files'] = 1222
_strings['disabled'] = 1223
_strings['files & music & video'] = 1226
_strings['files & pictures & music'] = 1227
_strings['files & pictures & video'] = 1228
_strings['music & programs'] = 1229
_strings['video & programs'] = 1230
_strings['pictures & programs'] = 1231
_strings['music & video & pictures & programs'] = 1232
_strings['programs & video & music'] = 1233
_strings['programs & pictures & music'] = 1234
_strings['programs & pictures & video'] = 1235
_strings['zeroconf'] = 1259
_strings['allow volume control']
clic en este botón para descargar el archivo de configuración generado con el botón 'Generar Configuración' (una vez este haya notificado su ejecución). Se descargará un archivo .JSON que se alojará en la carpeta <i>cno_solar/configurations/<span style='color:blue'>system_config.json</span></i>. El ícono y la descripción del botón cambiarán para notificar la descarga del archivo.</li>
</ul>
''', layout=widgets.Layout(height='auto'))
ac_documentation = widgets.Accordion(children=[doc_location, doc_inverter, doc_module, doc_sysdesign])
ac_documentation.set_title(0, 'Tab Ubicación')
ac_documentation.set_title(1, 'Tab Inversor')
ac_documentation.set_title(2, 'Tab Módulo')
ac_documentation.set_title(3, 'Tab Diseño Planta')
tab_doc = widgets.Box([widgets.HTML('<h4>Documentación</h4>', layout=widgets.Layout(height='auto')),
widgets.VBox([widgets.Box([ac_documentation], layout=gui_layout)])],
layout=widgets.Layout(display='flex',
flex_flow='column',
border='solid 0px',
align_items='stretch',
width='100%'))
###############################
# LOCATION TAB #
###############################
surfaces = {'': None,
'Urbano': 'urban',
'Césped': 'grass',
'Césped Fresco': 'fresh grass',
'Tierra': 'soil',
'Arena': 'sand',
'Nieve': 'snow',
'Nieve Fresca': 'fresh snow',
'Asfalto': 'asphalt',
'Hormigón': 'concrete',
'Aluminio': 'aluminum',
'Cobre': 'copper',
'Acero': 'fresh steel',
'Acero Sucio': 'dirty steel',
'Mar': 'sea'}
gui_layout = widgets.Layout(display='flex',
flex_flow='row',
justify_content='space-between')
w_latitude = widgets.FloatText(value=0,
step=0.001,
description='',
disabled=False,
style={'description_width': 'initial'})
w_longitude = widgets.FloatText(value=0,
step=0.01,
description='',
disabled=False,
style={'description_width': 'initial'})
w_altitude = widgets.FloatText(value=0,
step=1,
description='',
disabled=False,
style={'description_width': 'initial'})
w_timezone = widgets.Dropdown(options=pytz.all_timezones,
value='America/Bogota',
description='',
style={'description_width': 'initial'})
w_surface = widgets.Dropdown(options=surfaces,
value=None,
description='',
style={'description_width': 'initial'})
w_albedo = widgets.BoundedFloatText(value=None,
step=0.01,
min=0,
max=1,
description='',
disabled=False,
style={'description_width': 'initial'})
def handle_surface_change(change):
if change.new != None:
w_albedo.value = pvlib.irradiance.SURFACE_ALBEDOS[change.new]
w_surface.observe(handle_surface_change, names='value')
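# pvlib.irradiance.SURFACE_ALBEDOS is a plain dict of typical albedo values keyed by
# surface type, so picking a surface auto-fills the albedo widget; the exact values
# depend on the installed pvlib version.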
widget_location = [widgets.Box([widgets.HTML('<h4>Información Geográfica</h4>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('Latitud'), w_latitude], layout=gui_layout),
widgets.Box([widgets.Label('Longitud'), w_longitude], layout=gui_layout),
widgets.Box([widgets.Label('Altitud [m.s.n.m]'), w_altitude], layout=gui_layout),
widgets.Box([widgets.Label('Huso Horario'), w_timezone], layout=gui_layout),
widgets.Box([widgets.Label('Superficie'), w_surface], layout=gui_layout),
widgets.Box([widgets.Label('Albedo [ad.]'), w_albedo], layout=gui_layout)]
tab_location = widgets.Box(widget_location, layout=widgets.Layout(display='flex',
flex_flow='column',
border='solid 0px',
align_items='stretch',
width='50%'))
###############################
# INVERTER TAB #
###############################
inv_repo = {'': None,
'CEC': 'CECInverter',
'Sandia': 'SandiaInverter',
'<NAME>': 'ADRInverter'}
gui_layout = widgets.Layout(display='flex',
flex_flow='row',
justify_content='space-between')
inverter_btn = widgets.ToggleButtons(value=None,
options=['Repositorio', 'PVsyst', 'Manual'],
description='',
disabled=False,
button_style='',
tooltips=['Base de datos de PVlib',
'Importar desde PVsyst',
'Configuración manual'])
# REPOSITORY
# Repository Widgets
inverter_vbox = widgets.VBox([inverter_btn])
dropdown_invrepo = widgets.Dropdown(options=inv_repo,
value=None,
description='',
style={'description_width': 'initial'})
dropdown_manufac = widgets.Dropdown(options='',
value=None,
disabled=True,
description='',
style={'description_width': 'initial'})
w_dropinvrepo = widgets.VBox([widgets.Box([widgets.Label('Repositorio'), dropdown_invrepo], layout=gui_layout)])
w_dropmanufac = widgets.VBox([widgets.Box([widgets.Label('Fabricantes'), dropdown_manufac], layout=gui_layout)])
# PVsyst Widgets
class SelectFilesButton(widgets.Button):
'''A file widget that leverages tkinter.filedialog'''
def __init__(self):
super(SelectFilesButton, self).__init__()
# Add the selected_files trait
self.add_traits(files=traitlets.traitlets.Any()) # List()
# Create the button
self.description = 'Seleccionar'
self.icon = 'square-o'
self.layout = widgets.Layout(width='34%', height='auto')
# Set on click behavior
self.on_click(self.select_files)
@staticmethod
def select_files(b):
'''Generate instance of tkinter.filedialog '''
# Create Tk root
root = Tk()
# Hide the main window
root.withdraw()
# Raise the root to the top of all windows
root.call('wm', 'attributes', '.', '-topmost', True)
# List of selected files will be set to b.files
b.files = filedialog.askopenfilename(filetypes=(('OND Files', '.OND'),),
multiple=False,
title='Select OND Data File')
b.description = 'Seleccionado'
b.icon = 'check-square-o'
upload_btn = SelectFilesButton()
btn = widgets.Button(value=False,
description='Cargar OND',
disabled=False,
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltip='Cargar los archivos .OND',
icon='circle',
layout=widgets.Layout(width='34%', height='auto'))
btn.add_traits(files=traitlets.traitlets.Dict())
w_upload = widgets.VBox([widgets.Box([widgets.HTML('<h5> </h5>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('Archivo Inversor (.OND)'), upload_btn, btn], layout=gui_layout)])
# Manual Widgets
dropdown_manual = widgets.Dropdown(options=['', 'SNL PVlib', 'NREL PVWatts'],
value=None,
description='')
w_dropmanual = widgets.VBox([widgets.Box([widgets.Label('Formato de Configuración'), dropdown_manual], layout=gui_layout)])
def handle_toggle(change):
if change['new'] == 'Repositorio':
inverter_vbox.children = [inverter_btn, w_dropinvrepo, w_dropmanufac]
elif change['new'] == 'PVsyst':
inverter_vbox.children = [inverter_btn, w_upload]
elif change['new'] == 'Manual':
inverter_vbox.children = [inverter_btn, w_dropmanual]
def handle_dropdown_manuf(change):
inverters = pvlib.pvsystem.retrieve_sam(change['new'])
manufacturers = []
manufacturers.append('')
for string in inverters.transpose().index:
manufacturers.append(string[:string.index('__')])
manufacturers.append(change['new'])
dropdown_manufac.options = list(pd.unique(manufacturers))
dropdown_manufac.disabled = False
inverter_vbox.children = [inverter_btn, w_dropinvrepo, w_dropmanufac]
def handle_dropdown_repo(change):
inverters = pvlib.pvsystem.retrieve_sam(dropdown_manufac.options[-1])
matching = [s for s in inverters.transpose().index if change['new'] in s]
inv_options = list(inverters[matching].transpose().index)
inv_options.insert(0, '')
inv_drop = widgets.Dropdown(options=inv_options,
value=None,
description='',
style={'description_width': 'initial'})
w_dropinv = widgets.VBox([widgets.Box([widgets.Label('Inversores'), inv_drop], layout=gui_layout)])
inverter_vbox.children = [inverter_btn, w_dropinvrepo, w_dropmanufac, w_dropinv]
# PVSYST
def on_button_clicked(obj):
btn.description = 'OND Cargado'
btn.icon = 'check-circle'
with output:
output.clear_output()
ond = pvsyst.ond_to_inverter_param(path=upload_btn.files)
inverter = {'Vac': float(ond['pvGInverter']['TConverter']['VOutConv']), # Voltaje de red (Parámetros principales)
'Pso': float(ond['pvGInverter']['TConverter']['PLim1']), # Pthresh
'Paco': float(ond['pvGInverter']['TConverter']['PNomConv'])*1000, # Potencia CA máxima
'Pdco': float(ond['pvGInverter']['TConverter']['PNomDC'])*1000, # Potencia FV nominal
'pdc0': float(ond['pvGInverter']['TConverter']['PNomDC'])*1000,
'Vdco': float(ond['pvGInverter']['TConverter']['VNomEff'].split(',')[1]), # Voltaje medio
'Pnt': float(ond['pvGInverter']['Night_Loss']), # Night Loss
'Vdcmax': float(ond['pvGInverter']['TConverter']['VAbsMax']), # Alto voltaje -- Voltaje de entrada (Curva de eficiencia)
'Idcmax': float(ond['pvGInverter']['TConverter']['IMaxDC']),
'Mppt_low': float(ond['pvGInverter']['TConverter']['VMppMin']), # Vmín@Pnom
'Mppt_high': float(ond['pvGInverter']['TConverter']['VMPPMax']), # Alto Voltaje
'eta_inv_nom': float(ond['pvGInverter']['TConverter']['EfficEuro']),
'eta_inv_ref': 0.9637,
'Name': ond['pvGInverter']['pvCommercial']['Model']}
btn.files = {'inv': inverter}
# MANUAL
def handle_dropdown_manual(change):
if change['new'] == 'SNL PVlib':
w_Paco = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_Pdco = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_Vdco = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_Pso = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_C0 = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_C1 = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_C2 = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_C3 = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_Pnt = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
inv_conf = widgets.VBox([widgets.Box([widgets.HTML('<h5>Configuración SNL PVlib</h5>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('$P_{AC}$ Nominal [W]'), w_Paco], layout=gui_layout),
widgets.Box([widgets.Label('$P_{DC}$ Nominal [W]'), w_Pdco], layout=gui_layout),
widgets.Box([widgets.Label('$V_{DC}$ Nominal [V]'), w_Vdco], layout=gui_layout),
widgets.Box([widgets.Label('$P_{DC}$ de Arranque [W]'), w_Pso], layout=gui_layout),
widgets.Box([widgets.Label('$C_0$ [1/W]'), w_C0], layout=gui_layout),
widgets.Box([widgets.Label('$C_1$ [1/V]'), w_C1], layout=gui_layout),
widgets.Box([widgets.Label('$C_2$ [1/V]'), w_C2], layout=gui_layout),
widgets.Box([widgets.Label('$C_3$ [1/V]'), w_C3], layout=gui_layout),
widgets.Box([widgets.Label('$P_{AC}$ Consumo Nocturno [W]'), w_Pnt], layout=gui_layout)])
inverter_vbox.children = [inverter_btn, w_dropmanual, inv_conf]
else:
w_pdc0 = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_eta_inv_nom = widgets.BoundedFloatText(value=None, min=0, max=1, step=0.01, description='', style={'description_width': 'initial'})
inv_conf = widgets.VBox([widgets.Box([widgets.HTML('<h5>Configuración NREL PVWatts</h5>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('$P_{DC}$ Nominal [W]'), w_pdc0], layout=gui_layout),
widgets.Box([widgets.Label('Eficiencia Nominal [ad.]'), w_eta_inv_nom], layout=gui_layout)])
inverter_vbox.children = [inverter_btn, w_dropmanual, inv_conf]
# OBSERVE
inverter_btn.observe(handle_toggle, 'value')
dropdown_invrepo.observe(handle_dropdown_manuf, 'value')
dropdown_manufac.observe(handle_dropdown_repo, 'value')
btn.on_click(on_button_clicked)
dropdown_manual.observe(handle_dropdown_manual, 'value')
# TAB
tab_inverter = widgets.Box([widgets.HTML("<h4>Método de Configuración</h4>", layout=widgets.Layout(height='auto')),
inverter_vbox],
layout=widgets.Layout(display='flex',
flex_flow='column',
border='solid 0px',
align_items='stretch',
width='50%'))
###############################
# MODULE TAB #
###############################
mod_repo = {'': None,
'PVFree': 'PVFree',
'CEC': 'CECMod',
'Sandia': 'SandiaMod'}
gui_layout = widgets.Layout(display='flex',
flex_flow='row',
justify_content='space-between')
module_btn = widgets.ToggleButtons(value=None,
options=['Repositorio', 'PVsyst', 'Manual'],
description='',
disabled=False,
button_style='', # 'success', 'info', 'warning', 'danger' or ''
tooltips=['Base de datos de PVlib',
'Importar desde PVsyst',
'Configuración manual'])
# REPOSITORY
# Repository Widgets
module_vbox = widgets.VBox([module_btn])
dropdown_modrepo = widgets.Dropdown(options=mod_repo,
value=None,
description='',
style={'description_width': 'initial'})
dropdown_modmanu = widgets.Dropdown(options='',
value=None,
disabled=True,
description='',
style={'description_width': 'initial'})
w_dropmodrepo = widgets.VBox([widgets.Box([widgets.Label('Repositorio'), dropdown_modrepo], layout=gui_layout)])
w_dropmodmanu = widgets.VBox([widgets.Box([widgets.Label('Fabricantes'), dropdown_modmanu], layout=gui_layout)])
# PVsyst Widgets
class SelectPANButton(widgets.Button):
'''A file widget that leverages tkinter.filedialog'''
def __init__(self):
super(SelectPANButton, self).__init__()
# Add the selected_files trait
self.add_traits(files=traitlets.traitlets.Any()) # List()
# Create the button
self.description = 'Seleccionar'
self.icon = 'square-o'
self.layout = widgets.Layout(width='34%', height='auto')
# Set on click behavior
self.on_click(self.select_files)
@staticmethod
def select_files(b):
'''Generate instance of tkinter.filedialog '''
# Create Tk root
root = Tk()
# Hide the main window
root.withdraw()
# Raise the root to the top of all windows
root.call('wm', 'attributes', '.', '-topmost', True)
# List of selected files will be set to b.files
b.files = filedialog.askopenfilename(filetypes=(('PAN Files', '.PAN'),),
multiple=False,
title='Select PAN Data File')
b.description = 'Seleccionado'
b.icon = 'check-square-o'
upload_modbtn = SelectPANButton()
modbtn = widgets.Button(value=False,
description='Cargar PAN',
disabled=False,
button_style='',
tooltip='Cargar los archivos .PAN',
icon='circle',
layout=widgets.Layout(width='34%', height='auto'))
modbtn.add_traits(files=traitlets.traitlets.Dict())
modbtn_output = widgets.Output()
w_modupload = widgets.VBox([widgets.Box([widgets.HTML('<h5> </h5>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('Archivo Módulo (.PAN)'), upload_modbtn, modbtn], layout=gui_layout)])
# Manual Widgets
dropdown_modmanual = widgets.Dropdown(options=['', 'SNL PVlib', 'NREL PVWatts'],
value=None,
description='Método',
style={'description_width': 'initial'})
# BIFACIAL PARAMETERS
dropdown_bifacial = widgets.Dropdown(options=[('Sí', True), ('No', False)],
value=False,
description='',
style={'description_width': 'initial'})
w_dropbrifacial = widgets.VBox([widgets.Box([widgets.Label('Panel Bifacial'), dropdown_bifacial], layout=gui_layout)])
bifacial_vbox = widgets.VBox([w_dropbrifacial])
def handle_modtoggle(change):
if change['new'] == 'Repositorio':
module_vbox.children = [module_btn, w_dropmodrepo]
elif change['new'] == 'PVsyst':
module_vbox.children = [module_btn, w_modupload]
elif change['new'] == 'Manual':
w_T_NOCT = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_Type = widgets.Dropdown(options=[('Mono-Si', 'monoSi'), ('Multi-Si', 'multiSi'), ('Poli-Si', 'polySi'), ('CIS', 'cis'), ('CIGS', 'cigs'), ('CdTe', 'cdte'), ('Amorfo', 'amorphous')], value=None, description='', style={'description_width': 'initial'})
w_N_s = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_I_sc_ref = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_V_oc_ref = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_I_mp_ref = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_V_mp_ref = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_alpha_sc = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_beta_oc = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_gamma_r = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
w_STC = widgets.FloatText(value=None, description='', style={'description_width': 'initial'})
mod_conf = widgets.VBox([widgets.Box([widgets.HTML('<h5>Configuración Módulo</h5>', layout=widgets.Layout(height='auto'))]),
widgets.Box([widgets.Label('$T_{NOCT}$ [ºC]'), w_T_NOCT], layout=gui_layout),
widgets.Box([widgets.Label('Tecnología'), w_Type], layout=gui_layout),
widgets.Box([widgets.Label('Número Celdas'), w_N_s], layout=gui_layout),
widgets.Box([widgets.Label('$I_{SC}$ en STC [A]'), w_I_sc_ref], layout=gui_layout),
widgets.Box([widgets.Label('$V_{OC}$ en STC [V]'), w_V_oc_ref], layout=gui_layout),
widgets.Box([widgets.Label('$I_{MP}$ en STC [A]'), w_I_mp_ref], layout=gui_layout),
widgets.Box([widgets.Label('$V_{MP}$ en STC [V]'), w_V_mp_ref], layout=gui_layout),
widgets.Box([widgets.Label('Coef. Temp. $I_{SC}$ [A/ºC]'), w_alpha_sc], layout=gui_layout),
widgets.Box([widgets.Label('Coef. Temp. $V_{OC}$ [V/ºC]'), w_beta_oc], layout=gui_layout),
widgets.Box([widgets.Label('Coef. Temp. $P_{MP}$ [%/ºC]'), w_gamma_r], layout=gui_layout),
widgets.Box([widgets.Label('$P_{Nominal}$ en STC [W]'), w_STC], layout=gui_layout)])
module_vbox.children = [module_btn, mod_conf]
def handle_dropdown_modmanuf(change):
if change['new'] == 'PVFree':
dropdown_pvfree = widgets.Dropdown(options=['', 'pvmodule', 'cecmodule'],
value=None,
description='',
style={'description_width': 'initial'})
pvfree_id
##############################################################################
#
# Copyright (c) 2007 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Chart Element Processing
"""
import reportlab.lib.formatters
from reportlab.graphics import shapes
from reportlab.graphics.charts import barcharts, lineplots, piecharts
from reportlab.graphics.charts import spider, doughnut
from z3c.rml import attr, directive, interfaces, occurence
# Patches against Reportlab 2.0
lineplots.Formatter = reportlab.lib.formatters.Formatter
class PropertyItem(directive.RMLDirective):
def process(self):
attrs = dict(self.getAttributeValues())
self.parent.dataList.append(attrs)
class PropertyCollection(directive.RMLDirective):
propertyName = None
def processAttributes(self):
prop = getattr(self.parent.context, self.propertyName)
# Get global properties
for name, value in self.getAttributeValues():
setattr(prop, name, value)
def process(self):
self.processAttributes()
# Get item specific properties
prop = getattr(self.parent.context, self.propertyName)
self.dataList = []
self.processSubDirectives()
for index, data in enumerate(self.dataList):
for name, value in data.items():
setattr(prop[index], name, value)
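# Illustrative (hypothetical) RML fragment handled by this pattern, here for the
# 'bars' collection defined further below: attributes on the collection element
# apply globally, each child element to prop[index].
#   <bars strokeWidth="0.5">
#     <bar fillColor="red"/>
#     <bar fillColor="blue"/>
#   </bars>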
class IText(interfaces.IRMLDirectiveSignature):
"""Draw a text on the chart."""
x = attr.Measurement(
title=u'X-Coordinate',
description=(u'The X-coordinate of the lower-left position of the '
u'text.'),
required=True)
y = attr.Measurement(
title=u'Y-Coordinate',
description=(u'The Y-coordinate of the lower-left position of the '
u'text.'),
required=True)
angle = attr.Float(
title=u'Rotation Angle',
description=(u'The angle about which the text will be rotated.'),
required=False)
text = attr.TextNode(
title=u'Text',
description=u'The text to be printed.',
required=True)
fontName = attr.String(
title=u'Font Name',
description=u'The name of the font.',
required=False)
fontSize = attr.Measurement(
title=u'Font Size',
description=u'The font size for the text.',
required=False)
fillColor = attr.Color(
title=u'Fill Color',
description=u'The color in which the text will appear.',
required=False)
textAnchor = attr.Choice(
title=u'Text Anchor',
description=u'The position in the text to which the coordinates refer.',
choices=('start', 'middle', 'end', 'boxauto'),
required=False)
class Text(directive.RMLDirective):
signature = IText
def process(self):
attrs = dict(self.getAttributeValues())
string = shapes.String(
attrs.pop('x'), attrs.pop('y'), attrs.pop('text'))
angle = attrs.pop('angle', 0)
for name, value in attrs.items():
setattr(string, name, value)
group = shapes.Group(string)
group.translate(0,0)
group.rotate(angle)
self.parent.parent.drawing.add(group)
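# Hypothetical usage sketch: within a chart, a directive such as
#   <text x="1cm" y="2cm" angle="45" fontSize="8">Some label</text>
# ends up as a rotated reportlab String added to the chart's drawing
# (attribute values here are illustrative).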
class ITexts(interfaces.IRMLDirectiveSignature):
"""A set of texts drawn on the chart."""
occurence.containing(
occurence.ZeroOrMore('text', IText)
)
class Texts(directive.RMLDirective):
signature = ITexts
factories = {'text': Text}
class Series(directive.RMLDirective):
def process(self):
attrs = self.getAttributeValues(valuesOnly=True)
self.parent.data.append(attrs[0])
class Data(directive.RMLDirective):
series = None
def process(self):
self.data = []
self.factories = {'series': self.series}
self.processSubDirectives()
self.parent.context.data = self.data
class ISeries1D(interfaces.IRMLDirectiveSignature):
"""A one-dimensional series."""
values = attr.TextNodeSequence(
title=u'Values',
description=u"Numerical values representing the series' data.",
value_type=attr.Float(),
required=True)
class Series1D(Series):
signature = ISeries1D
class IData1D(interfaces.IRMLDirectiveSignature):
"""A 1-D data set."""
occurence.containing(
occurence.OneOrMore('series', ISeries1D)
)
class Data1D(Data):
signature = IData1D
series = Series1D
class ISingleData1D(interfaces.IRMLDirectiveSignature):
"""A 1-D data set."""
occurence.containing(
occurence.One('series', ISeries1D)
)
class SingleData1D(Data1D):
signature = ISingleData1D
def process(self):
self.data = []
self.factories = {'series': self.series}
self.processSubDirectives()
self.parent.context.data = self.data[0]
class ISeries2D(interfaces.IRMLDirectiveSignature):
"""A two-dimensional series."""
values = attr.TextNodeGrid(
title=u'Values',
description=u"Numerical values representing the series' data.",
value_type=attr.Float(),
columns=2,
required=True)
class Series2D(Series):
signature = ISeries2D
class IData2D(interfaces.IRMLDirectiveSignature):
"""A 2-D data set."""
occurence.containing(
occurence.OneOrMore('series', ISeries2D)
)
class Data2D(Data):
signature = IData2D
series = Series2D
class IBar(interfaces.IRMLDirectiveSignature):
"""Define the look of a bar."""
strokeColor = attr.Color(
title=u'Stroke Color',
description=u'The color in which the bar border is drawn.',
required=False)
strokeWidth = attr.Measurement(
title=u'Stroke Width',
description=u'The width of the bar border line.',
required=False)
fillColor = attr.Color(
title=u'Fill Color',
description=u'The color with which the bar is filled.',
required=False)
class Bar(PropertyItem):
signature = IBar
class IBars(IBar):
"""Collection of bar subscriptions."""
occurence.containing(
occurence.ZeroOrMore('bar', IBar)
)
class Bars(PropertyCollection):
signature = IBars
propertyName = 'bars'
factories = {'bar': Bar}
class ILabelBase(interfaces.IRMLDirectiveSignature):
dx = attr.Measurement(
title=u'Horizontal Extension',
description=(u'The width of the label.'),
required=False)
dy = attr.Measurement(
title=u'Vertical Extension',
description=(u'The height of the label.'),
required=False)
angle = attr.Float(
title=u'Angle',
description=(u'The angle to rotate the label.'),
required=False)
boxAnchor = attr.Choice(
title=u'Box Anchor',
description=(u'The position relative to the label.'),
choices=('nw','n','ne','w','c','e','sw','s','se', 'autox', 'autoy'),
required=False)
boxStrokeColor = attr.Color(
title=u'Box Stroke Color',
description=(u'The color of the box border line.'),
required=False)
boxStrokeWidth = attr.Measurement(
title=u'Box Stroke Width',
description=u'The width of the box border line.',
required=False)
boxFillColor = attr.Color(
title=u'Box Fill Color',
description=(u'The color in which the box is filled.'),
required=False)
boxTarget = attr.Text(
title=u'Box Target',
description=u'The box target.',
required=False)
fillColor = attr.Color(
title=u'Fill Color',
description=(u'The color in which the label is filled.'),
required=False)
strokeColor = attr.Color(
title=u'Stroke Color',
description=(u'The color of the label.'),
required=False)
strokeWidth = attr.Measurement(
title=u'Stroke Width',
description=u'The width of the label line.',
required=False)
fontName = attr.String(
title=u'Font Name',
description=u'The font used to print the value.',
required=False)
fontSize = attr.Measurement(
title=u'Font Size',
description=u'The size of the value text.',
required=False)
leading = attr.Measurement(
title=u'Leading',
description=(u'The height of a single text line. It includes '
u'character height.'),
required=False)
width = attr.Measurement(
title=u'Width',
description=u'The width of the label.',
required=False)
maxWidth = attr.Measurement(
title=u'Maximum Width',
description=u'The maximum width of the label.',
required=False)
height = attr.Measurement(
title=u'Height',
description=u'The height of the label.',
required=False)
textAnchor = attr.Choice(
title=u'Text Anchor',
description=u'The position in the text to which the coordinates refer.',
choices=('start', 'middle', 'end', 'boxauto'),
required=False)
visible = attr.Boolean(
title=u'Visible',
description=u'A flag making the label text visible.',
required=False)
leftPadding = attr.Measurement(
title=u'Left Padding',
description=u'The size of the padding on the left side.',
required=False)
rightPadding = attr.Measurement(
title=u'Right Padding',
description=u'The size of the padding on the right side.',
required=False)
topPadding = attr.Measurement(
title=u'Top Padding',
description=u'The size of the padding on the top.',
required=False)
bottomPadding = attr.Measurement(
title=u'Bottom Padding',
description=u'The size of the padding on the bottom.',
required=False)
class IPositionLabelBase(ILabelBase):
x = attr.Measurement(
title=u'X-Coordinate',
description=(u'The X-coordinate of the lower-left position of the '
u'label.'),
required=False)
y = attr.Measurement(
title=u'Y-Coordinate',
description=(u'The Y-coordinate of the lower-left position of the '
u'label.'),
required=False)
class ILabel(IPositionLabelBase):
"""A label for the chart on an axis."""
text = attr.TextNode(
title=u'Text',
description=u'The label text to be displayed.',
required=True)
class Label(PropertyItem):
signature = ILabel
class IBarLabels(ILabelBase):
"""A set of labels for a bar chart"""
occurence.containing(
occurence.ZeroOrMore('label', ILabel)
)
class BarLabels(PropertyCollection):
signature = IBarLabels
propertyName = 'barLabels'
factories = {'label': Label}
name = 'barLabels'
class ILabels(IPositionLabelBase):
"""A set of labels of an axis."""
occurence.containing(
occurence.ZeroOrMore('label', ILabel)
)
class Labels(PropertyCollection):
signature = ILabels
propertyName = 'labels'
factories = {'label': Label}
class IAxis(interfaces.IRMLDirectiveSignature):
occurence.containing(
occurence.ZeroOrMore('labels', ILabels)
)
visible = attr.Boolean(
title=u'Visible',
description=u'When true, draw the entire axis with all details.',
required=False)
visibleAxis = attr.Boolean(
title=u'Visible Axis',
description=u'When true, draw the axis line.',
required=False)
visibleTicks = attr.Boolean(
title=u'Visible Ticks',
description=u'When true, draw the axis ticks on the line.',
required=False)
visibleLabels = attr.Boolean(
title=u'Visible Labels',
description=u'When true, draw the axis labels.',
required=False)
visibleGrid = attr.Boolean(
title=u'Visible Grid',
description=u'When true, draw the grid lines for the axis.',
required=False)
strokeWidth = attr.Measurement(
title=u'Stroke Width',
description=u'The width of axis line and ticks.',
required=False)
strokeColor = attr.Color(
title=u'Stroke Color',
description=u'The color in which the axis line and ticks are drawn.',
required=False)
strokeDashArray = attr.Sequence(
title=u'Stroke Dash Array',
description=u'The dash array that is used for the axis line and ticks.',
value_type=attr.Float(),
required=False)
gridStrokeWidth = attr.Measurement(
title=u'Grid Stroke Width',
description=u'The width of the grid lines.',
required=False)
gridStrokeColor = attr.Color(
title=u'Grid Stroke Color',
description=u'The color in which the grid lines are drawn.',
required=False)
gridStrokeDashArray = attr.Sequence(
title=u'Grid Stroke Dash Array',
description=u'The dash array that is used for the grid lines.',
value_type=attr.Float(),
required=False)
gridStart = attr.Measurement(
title=u'Grid Start',
description=(u'The start of the grid lines with respect to the '
u'axis origin.'),
required=False)
gridEnd = attr.Measurement(
title=u'Grid End',
description=(u'The end of the grid lines with respect to the '
u'axis origin.'),
required=False)
style = attr.Choice(
title=u'Style',
description=u'The plot style of the common categories.',
choices=('parallel', 'stacked', 'parallel_3d'),
required=False)
class Axis(directive.RMLDirective):
signature = IAxis
name = ''
factories = {'labels': Labels}
def process(self):
self.context = axis = getattr(self.parent.context, self.name)
for name, value in self.getAttributeValues():
setattr(axis, name, value)
self.processSubDirectives()
class IName(interfaces.IRMLDirectiveSignature):
"""A category name"""
text = attr.TextNode(
title=u'Text',
description=u'The text value that is the name.',
required=True)
class Name(directive.RMLDirective):
signature = IName
def process(self):
text = self.getAttributeValues(valuesOnly=True)[0]
self.parent.names.append(text)
class ICategoryNames(interfaces.IRMLDirectiveSignature):
"""A list of category names."""
occurence.containing(
occurence.OneOrMore('name', IName),
)
class CategoryNames(directive.RMLDirective):
signature = ICategoryNames
factories = {'name': Name}
def process(self):
self.names = []
self.processSubDirectives()
self.parent.context.categoryNames = self.names
class ICategoryAxis(IAxis):
"""An axis displaying categories (instead of numerical values)."""
occurence.containing(
occurence.ZeroOrOne('categoryNames', ICategoryNames),
*IAxis.queryTaggedValue('directives', ())
)
categoryNames = attr.Sequence(
title=u'Category Names',
description=u'A simple list of category names.',
value_type=attr.Text(),
required=False)
joinAxis = attr.Boolean(
title=u'Join Axis',
description=u'When true, both axes join together.',
required=False)
joinAxisPos = attr.Measurement(
title=u'Join Axis Position',
description=u'The
self.assertEqual(expected_formats_result, formats_result)
class TestCheckAssetUriItemsExpectedInWebpage(TestCase):
def test_check_asset_uri_items_expected_in_webpage_returns_success(self):
uri_items_in_html = [
"asset_uri_1.jpg",
"asset_uri_2.jpg",
"/j/xyz/a/lokiujyht?format=html&lang=en",
"/j/xyz/a/lokiujyht?format=pdf&lang=es",
"/j/xyz/a/lokiujyht?format=pdf&lang=en",
]
assets_data = [
{
"prefix": "asset_uri_1",
"uri_alternatives": [
"asset_uri_1.tiff", "asset_uri_1.jpg", "asset_uri_1.png"]
},
{
"prefix": "asset_uri_2",
"uri_alternatives": [
"asset_uri_2.tiff", "asset_uri_2.jpg", "asset_uri_2.png"]
}
]
expected = [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": [
"asset_uri_1.jpg",
],
"absent_in_html": [
"asset_uri_1.tiff", "asset_uri_1.png",
],
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": [
"asset_uri_2.jpg",
],
"absent_in_html": [
"asset_uri_2.tiff", "asset_uri_2.png",
],
},
]
expected_summary = {
"total expected": 2,
"total missing": 0,
"total alternatives": 6,
"total alternatives present in html": 2,
}
result, summary = check_asset_uri_items_expected_in_webpage(
uri_items_in_html,
assets_data)
self.assertEqual(expected, result)
self.assertEqual(expected_summary, summary)
def test_check_asset_uri_items_expected_in_webpage_returns_not_found_asset_uri(self):
uri_items_in_html = [
"asset_uri_1.jpg",
]
assets_data = [
{
"prefix": "asset_uri_1",
"uri_alternatives": [
"asset_uri_1.tiff", "asset_uri_1.jpg", "asset_uri_1.png"]
},
{
"prefix": "asset_uri_2",
"uri_alternatives": [
"asset_uri_2.tiff", "asset_uri_2.jpg", "asset_uri_2.png"]
}
]
expected = [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": [
"asset_uri_1.jpg",
],
"absent_in_html": [
"asset_uri_1.tiff", "asset_uri_1.png",
],
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": [],
"absent_in_html": [
"asset_uri_2.tiff", "asset_uri_2.jpg", "asset_uri_2.png"],
},
]
expected_summary = {
"total expected": 2,
"total missing": 1,
"total alternatives": 6,
"total alternatives present in html": 1,
}
result, summary = check_asset_uri_items_expected_in_webpage(
uri_items_in_html,
assets_data)
self.assertEqual(expected, result)
self.assertEqual(expected_summary, summary)
def test_check_asset_uri_items_expected_in_webpage_returns_incorrect_assets(self):
uri_items_in_html = [
"asset_uri_1.jpg",
"asset_uri_2.jpg",
"/j/xyz/a/lokiujyht?format=html&lang=en",
"/j/xyz/a/lokiujyht?format=pdf&lang=es",
"/j/xyz/a/lokiujyht?format=pdf&lang=en",
]
assets_data = [
{
"prefix": "asset_uri_1",
"uri_alternatives": []
},
{
"prefix": "asset_uri_2",
"uri_alternatives": []
}
]
expected = [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": [],
"absent_in_html": [],
"incorrect": True,
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": [],
"absent_in_html": [],
"incorrect": True,
},
]
expected_summary = {
"total expected": 2,
"total missing": 2,
"total alternatives": 0,
"total alternatives present in html": 0,
"total incorrect assets": 2,
}
result, summary = check_asset_uri_items_expected_in_webpage(
uri_items_in_html,
assets_data)
self.assertEqual(expected, result)
self.assertEqual(expected_summary, summary)
class TestCalculateMissingAndTotalItems(TestCase):
def test_calculate_missing_and_total_returns_no_missing(self):
expected = {
"html": {
"total": 1,
"missing": 0,
},
"pdf": {"total": 2, "missing": 0},
}
data = {
"html": [True],
"pdf": [True, True],
}
result = calculate_missing_and_total(data)
self.assertDictEqual(expected, result)
def test_calculate_missing_and_total_returns_all_missing(self):
expected = {
"html": {
"total": 1,
"missing": 1,
},
"pdf": {"total": 2, "missing": 2},
}
data = {
"html": [False],
"pdf": [False, False],
}
result = calculate_missing_and_total(data)
self.assertDictEqual(expected, result)
def test_calculate_missing_and_total_returns_some_missing(self):
expected = {
"html": {
"total": 2,
"missing": 1,
},
"pdf": {"total": 1, "missing": 1},
}
data = {
"html": [False, True],
"pdf": [False],
}
result = calculate_missing_and_total(data)
self.assertDictEqual(expected, result)
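# From the three expectations above, a minimal sketch of the function under
# test (the real implementation lives in operations.check_website_operations):
#
#   def calculate_missing_and_total(data):
#       return {
#           label: {"total": len(flags), "missing": flags.count(False)}
#           for label, flags in data.items()
#       }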
class TestFixesForJSON(TestCase):
def test_fixes_for_json_converts_datetime_to_isoformat(self):
dt = datetime.utcnow()
dt_iso = dt.isoformat() + "Z"
expected = (
'{"date": "%s", "s": "bla", "number": 9, "float": 8.5}' % dt_iso
)
j = {"date": dt, "s": "bla", "number": 9, "float": 8.5}
r = json.dumps(j, default=fixes_for_json)
self.assertEqual(expected, r)
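# fixes_for_json is passed to json.dumps as the `default` hook; a sketch
# consistent with this test (the real helper may handle more types):
#
#   def fixes_for_json(value):
#       if isinstance(value, datetime):
#           return value.isoformat() + "Z"
#       return str(value)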
class TestGetStatus(TestCase):
def test_get_status_returns_partial_because_total_incomplete_is_not_zero(self):
summary = {
"web html": {
"total": 1,
"total unavailable": 0,
"total incomplete": 1
},
"web pdf": {
"total": 1,
"total unavailable": 1
},
"renditions": {
"total": 1,
"total unavailable": 0
},
"assets": {
"total": 6,
"total unavailable": 0
},
"processing": {
"start": "t0",
"end": "t3",
"duration": 5
}
}
result = get_status(summary)
self.assertEqual("partial", result)
def test_get_status_returns_partial_because_total_unavailable_is_not_zero(self):
summary = {
"web html": {
"total": 1,
"total unavailable": 0,
"total incomplete": 0
},
"web pdf": {
"total": 1,
"total unavailable": 1
},
"renditions": {
"total": 1,
"total unavailable": 0
},
"assets": {
"total": 6,
"total unavailable": 1
},
"processing": {
"start": "t0",
"end": "t3",
"duration": 5
}
}
result = get_status(summary)
self.assertEqual("partial", result)
def test_get_status_returns_complete_because_total_unavailable_and_total_incomplete_are_zero(self):
summary = {
"web html": {
"total": 1,
"total unavailable": 0,
"total incomplete": 0
},
"web pdf": {
"total": 1,
"total unavailable": 0
},
"renditions": {
"total": 1,
"total unavailable": 0
},
"assets": {
"total": 6,
"total unavailable": 0
},
"processing": {
"start": "t0",
"end": "t3",
"duration": 5
}
}
result = get_status(summary)
self.assertEqual("complete", result)
def test_get_status_returns_missing_because_total_is_zero(self):
summary = {
"web html": {
"total": 0,
"total unavailable": 0,
"total incomplete": 0
},
"web pdf": {
"total": 0,
"total unavailable": 0
},
"renditions": {
"total": 0,
"total unavailable": 0
},
"assets": {
"total": 0,
"total unavailable": 0
},
"processing": {
"start": "t0",
"end": "t3",
"duration": 5
}
}
result = get_status(summary)
self.assertEqual("missing", result)
def test_get_status_returns_missing_because_total_is_equal_to_total_unavailable(self):
summary = {
"web html": {
"total": 1,
"total unavailable": 1,
"total incomplete": 0
},
"web pdf": {
"total": 1,
"total unavailable": 1
},
"renditions": {
"total": 1,
"total unavailable": 1
},
"assets": {
"total": 1,
"total unavailable": 1
},
"processing": {
"start": "t0",
"end": "t3",
"duration": 5
}
}
result = get_status(summary)
self.assertEqual("missing", result)
def test_get_status_returns_missing_because_summary_is_empty(self):
summary = {}
result = get_status(summary)
self.assertEqual("missing", result)
class TestCheckWebsiteUriListDeeply(TestCase):
@patch("operations.check_website_operations.async_requests.parallel_requests")
@patch("operations.check_website_operations.add_execution_in_database")
@patch("operations.check_website_operations.datetime")
@patch("operations.check_website_operations.requests.head")
@patch("operations.check_website_operations.requests.get")
@patch("operations.docs_utils.hooks.kernel_connect")
def test_check_website_uri_list_deeply_calls(self, mock_doc_manifest,
mock_get, mock_head, mock_dt, mock_add_execution_in_database,
mock_parallel_req):
doc_id = "BrT6FWNFFR3KBKHZVPN8Y9N"
website_url = "https://www.scielo.br"
object_store_url = "https://minio.scielo.br"
mock_doc_manifest.return_value = MockResponse(
200,
read_file(
"./tests/fixtures/BrT6FWNFFR3KBKHZVPN8Y9N.manifest.json")
)
mock_get.return_value = MockResponse(
200,
read_file(
"./tests/fixtures/BrT6FWNFFR3KBKHZVPN8Y9N.xml"))
mock_parallel_req.side_effect = [
[
MockClientResponse(
200,
"https://www.scielo.br/j/esa/a/BrT6FWNFFR3KBKHZVPN8Y9N"
"?format=html&lang=pt",
read_file(
"./tests/fixtures/BrT6FWNFFR3KBKHZVPN8Y9N_pt.html")
)
],
[
MockClientResponse(
200,
"https://www.scielo.br/j/esa/a/BrT6FWNFFR3KBKHZVPN8Y9N"
"?format=pdf&lang=pt"
)
],
[
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"409acdeb8f632022d41b3d94a3f00a837867937c.pdf"
),
],
[
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"8972aaa0916382b6f2d51a6d22732bb083851913.png"
),
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"3c30f9fec6947d47f404043fe08aaca8bc51b1fb.jpg"
),
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"36080074121a60c8e28fa1b28876e1adad4fe5d7.png"
),
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"b5b4bb9bc267794ececde428a33f5af705b0b1a6.jpg"
),
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"73a98051b6cf623aeb1146017ceb0b947df75ec8.png"
),
MockClientResponse(
200,
"https://minio.scielo.br/documentstore/1809-4457/"
"BrT6FWNFFR3KBKHZVPN8Y9N/"
"df14e57dc001993fd7f3fbcefa642e40e6964224.jpg"
),
]
]
mock_dt.utcnow.side_effect = [T0, T5]
doc_id_list = [doc_id]
dag_info = {"run_id": "xxxx"}
t0 = T0.isoformat() + "Z"
t1 = START_TIME.isoformat() + "Z"
t2 = END_TIME.isoformat() + "Z"
t3 = T5.isoformat() + "Z"
detail = read_file(
"./tests/fixtures/BrT6FWNFFR3KBKHZVPN8Y9N.inline.json")
expected_calls = [
call("doc_deep_checkup", {
"dag_run": "xxxx",
"input_file_name": None,
"pid_v3": "BrT6FWNFFR3KBKHZVPN8Y9N",
"pid_v2_journal": "1413-4152",
"pid_v2_issue": "1413-415220200050",
"pid_v2_doc": "S1413-41522020005004201",
"previous_pid_v2_doc": None,
"status": "partial",
"detail": detail.replace("t0", t0).replace(
"t1", t1).replace("t2", t2).replace("t3", t3).replace(
'2.7e-05', str(T0_to_T5)).replace(
'9e-06', str(DURATION))
}),
]
check_website_uri_list_deeply(
doc_id_list, website_url, object_store_url, dag_info)
self.assertEqual(mock_get.call_count, 1)
self.assertEqual(mock_dt.utcnow.call_count, 2)
self.assertListEqual(
expected_calls,
mock_add_execution_in_database.call_args_list
)
class TestGroupDocDataByWebpageType(TestCase):
def test_group_doc_data_by_webpage_type(self):
doc_webpages_data = [
{"doc_id": "x", "data": "2", "format": "html"},
{"doc_id": "x", "data": "1", "format": "pdf"},
{"doc_id": "x", "data": "b", "format": "html"},
{"doc_id": "x", "data": "a", "format": "pdf"},
]
expected = {
"web html": [
{"doc_id": "x", "data": "2", "format": "html"},
{"doc_id": "x", "data": "b", "format": "html"},
],
"web pdf": [
{"doc_id": "x", "data": "1", "format": "pdf"},
{"doc_id": "x", "data": "a", "format": "pdf"},
],
}
result = group_doc_data_by_webpage_type(doc_webpages_data)
self.assertDictEqual(expected, result)
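# A sketch consistent with this test: order-preserving grouping by the
# "format" key, with the "web " prefix added to each group label:
#
#   def group_doc_data_by_webpage_type(doc_webpages_data):
#       grouped = {}
#       for item in doc_webpages_data:
#           grouped.setdefault("web " + item["format"], []).append(item)
#       return grouped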
class TestGetNumberOfIncompleteHtml(TestCase):
def test_returns_0_if_giving_incomplete_is_0(self):
incomplete = 0
self.assertEqual(get_number_of_incomplete_html(incomplete, []), 0)
def test_returns_incomplete_if_there_is_only_one_html_and_incomplete(self):
report = [
{
"lang": "en",
"format": "html",
"pid_v2": "pid-v2", "acron": "xjk",
"doc_id_for_human": "artigo-1234",
"doc_id": "ldld",
"uri": "https://www.scielo.br/j/xjk/a/ldld?format=html&lang=en",
"available": True,
"status code": 200,
"components": [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": ["asset_uri_1.jpg",],
"absent_in_html": ["asset_uri_1.tiff", "asset_uri_1.png",],
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": [],
"absent_in_html": [
"asset_uri_2.jpg", "asset_uri_2.tiff", "asset_uri_2.png",
],
},
{
"type": "pdf",
"id": "en",
"present_in_html": ["/j/xjk/a/ldld?format=pdf&lang=en",],
},
{
"type": "html",
"id": "es",
"present_in_html": ["/j/xjk/a/ldld?format=html&lang=es",],
},
{
"type": "pdf",
"id": "es",
"present_in_html": ["/j/xjk/a/ldld?format=pdf&lang=es",],
},
],
"total missing components": 1,
"total expected components": 5,
"pdf": {"total": 2, "missing": 0},
"html": {"total": 1, "missing": 0},
"assets": {
"total expected": 2,
"total missing": 1,
"total alternatives": 6,
"total alternatives present in html": 1,
},
},
]
incomplete = 1
self.assertEqual(get_number_of_incomplete_html(incomplete, report), incomplete)
def test_returns_incomplete_if_missing_components_in_one_of_htmls(self):
report = [
{
"lang": "en",
"format": "html",
"pid_v2": "pid-v2", "acron": "xjk",
"doc_id_for_human": "artigo-1234",
"doc_id": "ldld",
"uri": "https://www.scielo.br/j/xjk/a/ldld?format=html&lang=en",
"available": True,
"status code": 200,
"components": [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": ["asset_uri_1.jpg",],
"absent_in_html": ["asset_uri_1.tiff", "asset_uri_1.png",],
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": ["asset_uri_2.jpg",],
"absent_in_html": ["asset_uri_2.tiff", "asset_uri_2.png",],
},
{
"type": "pdf",
"id": "en",
"present_in_html": ["/j/xjk/a/ldld?format=pdf&lang=en",],
},
{
"type": "html",
"id": "es",
"present_in_html": [],
"absent_in_html": ["/j/xjk/a/ldld?format=html&lang=es",],
},
{
"type": "pdf",
"id": "es",
"present_in_html": ["/j/xjk/a/ldld?format=pdf&lang=es",],
},
],
"total missing components": 0,
"total expected components": 5,
"pdf": {"total": 2, "missing": 1},
"html": {"total": 1, "missing": 0},
"assets": {
"total expected": 2,
"total missing": 0,
"total alternatives": 6,
"total alternatives present in html": 2,
},
},
{
"lang": "es",
"format": "html",
"pid_v2": "pid-v2", "acron": "xjk",
"doc_id_for_human": "artigo-1234",
"doc_id": "ldld",
"uri": "https://www.scielo.br/j/xjk/a/ldld?format=html&lang=es",
"available": True,
"status code": 200,
"start time": START_TIME,
"end time": END_TIME,
"duration": DURATION,
"components": [
{
"type": "asset",
"id": "asset_uri_1",
"present_in_html": [],
"absent_in_html": [
"asset_uri_1.jpg", "asset_uri_1.tiff", "asset_uri_1.png",
],
},
{
"type": "asset",
"id": "asset_uri_2",
"present_in_html": ["asset_uri_2.jpg",],
"absent_in_html": ["asset_uri_2.tiff", "asset_uri_2.png",],
},
{
"type": "html",
"id": "en",
"present_in_html": ["/j/xjk/a/ldld?format=html&lang=en",],
},
{
"type": "pdf",
"id": "en",
"present_in_html": | |
from __future__ import annotations
import pandas as pd
import numpy as np
from typing import List, Dict, Union, TYPE_CHECKING
from whyqd.base import BaseSchemaAction
if TYPE_CHECKING:
from ..models import ColumnModel, ModifierModel, FieldModel, CategoryModel, CategoryActionModel
class Action(BaseSchemaAction):
"""
Produce categories from terms or headers. There are three categorisation options:
1. Term-data are categories derived from values in the data,
2. Header-data are terms derived from the header name and boolean True for any value,
3. "Boolean"-data, the category itself is True/False.
Script::
"CATEGORISE > 'destination_field' < [modifier 'source_column', modifier 'source_column', etc.]"
Where a `-` modifier indicates that some or all values in the column are coerced to `boolean`, and `+` indicates
that specific values in the column are to be assigned to a defined `schema` `category`. This ACTION requires that
`values` in `columns` be ASSIGNED to the appropriate `schema` `category`::
"ASSIGN_CATEGORY_BOOLEANS > 'destination_field'::bool < 'source_column'"
or::
"ASSIGN_CATEGORY_UNIQUES > 'destination_field'::'destination_category' < 'source_column'::['unique_source_term', 'unique_source_term', etc.]"
.. note:: Categorisation requires that the destination `schema` `field` is assigned appropriate `category` `constraints`.
"""
def __init__(self) -> None:
super().__init__()
self.name = "CATEGORISE"
self.title = "Categorise"
self.description = "Apply categories to a list of columns. Each field must have a modifier, including the first (e.g. +A -B +C). '-' modifier indicates presence/absence of values as true/false for a specific term. '+' modifier indicates that the unique terms in the field must be matched to the unique terms defined in the schema. This is a two-step process, first requiring listing the columns effected, then applying the terms."
self.structure = ["modifier", "field"]
@property
def modifiers(self) -> List[ModifierModel]:
"""
Describes the modifiers for calculations.
Returns
-------
list of ModifierModel
ModifierModel representation of the modifiers.
"""
return [
{"name": "+", "title": "Uniques"},
{"name": "-", "title": "Values"},
]
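# For illustration, a hypothetical pair of scripts using both modifiers
# (field and column names are invented for the example):
#
#   "CATEGORISE > 'occupation_state' < [+ 'occupation', - 'is_retired']"
#   "ASSIGN_CATEGORY_UNIQUES > 'occupation_state'::'employed' < 'occupation'::['clerk', 'teacher']"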
def transform(
self,
df: pd.DataFrame,
destination: FieldModel,
source: List[Union[ColumnModel, ModifierModel]],
assigned: List[
Dict[str, Union[CategoryActionModel, FieldModel, CategoryModel, ColumnModel, List[CategoryModel]]]
],
) -> pd.DataFrame:
"""
Produce categories from terms or headers. There are three categorisation options:
1. Term-data are categories derived from values in the data,
2. Header-data are terms derived from the header name and boolean True for any value,
3. "Boolean"-data, the category itself is True/False. Default in a boolean is True.
.. note:: Categorisation is a special case, requiring both method fields and method categories; this is possible
only if the `destination` is a schema field that has the required term definitions.
Parameters
----------
df: DataFrame
Working data to be transformed
destination: FieldModel
Destination FieldModel for the result of the Action.
source: list of ColumnModel and / or ModifierModel
List of source columns and modifiers for the action.
assigned: list of dict
Each dict has values for: Assignment ACTION, destination schema field, schema category, source data column,
and a list of source data column category terms assigned to that schema category.
Returns
-------
Dataframe
Containing the implementation of the Action
"""
# This is a complex algorithm, and so there are LOTS of comments to explain each step.
# 1. The source list terms are in sets of two: + or - modifier, field
# The modifier defines one of two approaches:
# '+': The terms in the field are used to identify the schema category. This is used when the column values
# represent multiple terms.
# '-': Non-null terms indicate presence of a schema category. This is used when values represent a
# boolean True, and NaNs or voids represent False. The user must decide if zeros (0) are null or a value.
# If the schema field type is 'array', then the destination category terms are lists.
# If field type is 'boolean', then the default term is True.
# The default category term is assigned for any null values.
is_array = destination.type_field == "array"
is_boolean = destination.type_field == "boolean"
# Sort out boolean term names and defaults before they cause further pain ...
if destination.constraints:
# Boolean categories can only be True or False, but the user can set a default
default = None if not destination.constraints.default else destination.constraints.default.name
elif is_boolean:
default = True
else:
# Guard: for non-boolean fields without constraints, `default` would otherwise be unbound below
default = None
# Get all the assigned Schema Categories
destination_schema_categories = [assigned_category["category"].name for assigned_category in assigned]
if is_boolean:
# Can only be True/False, and we'll be resolving a text version of the names later, so ...
destination_schema_categories = [str(c).lower() for c in destination_schema_categories]
# Set the field according to the default
# https://stackoverflow.com/a/31469249
df[destination.name] = [[] for _ in range(len(df))] if is_array else default
# Develop the terms and conditions to assess membership of a category
# As per structure, requires sets of two terms: + or - modifier, field
new_field = []
term_set = len(self.structure)
# Annotate before the loop https://stackoverflow.com/a/41641489/295606
modifier: ModifierModel
source_column: ColumnModel
for modifier, source_column in self.core.chunks(source, term_set):
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.select.html
# Extract only the terms valid for this particular field
# With list of action scripts::
# "ACTION > 'schema_field' < [modifier 'source_column1', modifier 'source_column2']"
# for modifier, source_column in chunks[2, source_terms]:
# GENERATE: np.select(conditions, category_terms, default)
# GENERATE CATEGORY TERMS
# With list of category action assignment scripts::
# ["ASSIGN_CATEGORY_UNIQUES > 'schema_field'::'schema_category1' < 'source_column1'::['source_category_term1',etc.]",
# "ASSIGN_CATEGORY_UNIQUES > 'schema_field'::'schema_category2' < 'source_column1'::['source_category_term2',etc.]",
# "ASSIGN_CATEGORY_UNIQUES > 'schema_field'::'schema_category1' < 'source_column2'::['source_category_term3',etc.]"
# ...]
category_terms = []
# for schema_category in list of all schema_categories for this field
for schema_category in destination_schema_categories:
# for source_category_term in assigned source_categories from the list of all unique source terms in source_column
for assigned_category in assigned:
# if this 'source_column' (from the looped list of all source_columns) == 'source_column'
if assigned_category["source"].name == source_column.name:
# Extend [schema_category for _source_category in source_categories]
# Creating a matrix for np.select where::
# category_terms = [schema_category1, schema_category1, schema_category2, schema_category2, ...]
# Where the lengths of the two lists are identical, and the terms are allocated such that::
# Len(category_terms) == len(source_categories)
category_terms.extend([schema_category for _ in assigned_category.get("assigned", [])])
# From here, things depend on the modifier.
if modifier.name == "+":
conditions = self._get_unique_conditions(df, source_column, category_terms, assigned)
elif modifier.name == "-":
conditions = self._get_boolean_conditions(df, source_column, category_terms, assigned)
if is_boolean:
# if len(category_terms) == 1 and 'false', do nothing; if 'true', invert;
# if len(category_terms) == 2, use 'false' only
if "true" in category_terms:
# We need to correct for the disaster of 'true'
invrt = dict(zip(category_terms, conditions))
if len(category_terms) == 1:
invrt["false"] = ~invrt["true"]
category_terms = [t for t in invrt.keys() if t != "true"]
conditions = [invrt["false"]]
# Only two terms, True or False. Reset the dictionary names
if is_boolean and "false" in category_terms:
category_terms = [False if t == "false" else t for t in category_terms]
if not is_array:
# Assign the category terms immediately for membership; where no condition matches, the value
# falls back to the field's current value (i.e. equivalent to ORDER, but with category data)
df[destination.name] = np.select(conditions, category_terms, default=df[destination.name])
else:
if category_terms and conditions:
new_field.append(np.select(conditions, category_terms, default="none").tolist())
# If a list of terms, organise the nested lists and then set the new field
if is_array:
# Sorted to avoid hashing errors later ...
new_field = [sorted(list(set(x))) for x in zip(*new_field)]
for n in new_field:
if "none" in n:
n.remove("none")
if new_field:
df[destination.name] = new_field
return df
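# A minimal, self-contained sketch of the np.select pattern used above,
# with invented data (not part of the ACTION itself):
#
#   import numpy as np
#   import pandas as pd
#   df = pd.DataFrame({"occupation": ["clerk", "farmer", "teacher"]})
#   conditions = [df["occupation"].isin(["clerk", "teacher"]),
#                 df["occupation"].isin(["farmer"])]
#   category_terms = ["employed", "agricultural"]
#   np.select(conditions, category_terms, default="none")
#   # -> array(['employed', 'agricultural', 'employed'], dtype=...)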
###################################################################################################
### SUPPORT UTILITIES
###################################################################################################
def _get_unique_conditions(
self,
df: pd.DataFrame,
source: ColumnModel,
category_terms: List[str],
assigned: List[
Dict[str, Union[CategoryActionModel, FieldModel, CategoryModel, ColumnModel, List[CategoryModel]]]
],
) -> List[pd.Series]:
"""Return a list of pandas Series of boolean results to support an np.select conditions term. Used when
`modifier.name == '+'`.
Generate a matrix of len(category_terms) arrays by len(rows) of the source dataframe where they correspond
as follows::
[schema_category1, schema_category1, schema_category2, schema_category2, ...]
[[source_cat_t1], [source_cat_t1], [source_cat_t1], [source_cat_t1], ...]
Where each value is tested for inclusion in the assigned categories for that schema category::
[[True], [False], [False], ...]
Where only one should be `True` (although, I suppose, that's up to the user if a source category belongs to more
than one schema category), and all
import hashlib
import io
import os
import shutil
import tempfile
import time
from pathlib import Path
from runez.system import _R, abort, Anchored, decode, flattened, resolved_path, short, SYMBOLIC_TMP, SYS_INFO, UNSET
def basename(path, extension_marker=os.extsep, follow=False):
"""Base name of given `path`, ignoring extension if `extension_marker` is provided
Args:
path (str | Path | None): Path to consider
extension_marker (str | None): If provided: trim file extension
follow (bool): If True, follow symlink
Returns:
(str): Basename part of path, without extension if 'extension_marker' provided
"""
if path:
if follow:
path = os.path.realpath(path)
path = os.path.basename(path)
if extension_marker and extension_marker in path:
pre, _, post = path.rpartition(extension_marker)
if pre:
return pre
return path
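# Example usage (illustrative):
#   basename("/some/folder/bar.txt")                         # -> "bar"
#   basename("/some/folder/bar.txt", extension_marker=None)  # -> "bar.txt"
#   basename("/some/folder/bar.tar.gz")                      # -> "bar.tar"
#   basename(".bashrc")  # -> ".bashrc" (a leading dot is not an extension)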
def checksum(path, hash=hashlib.sha256, blocksize=65536):
"""
Args:
path (str | Path | None): Path to file
hash: Hash algorithm to use
blocksize (int): Number of bytes to read per iteration
Returns:
(str): Hex-digest
"""
if isinstance(hash, str):
hash = getattr(hashlib, hash)
if callable(hash):
hash = hash()
with open(path, "rb") as fh:
buf = fh.read(blocksize)
while len(buf) > 0:
hash.update(buf)
buf = fh.read(blocksize)
return hash.hexdigest()
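# Example usage (illustrative):
#   checksum("/etc/hosts")                     # sha256 hex-digest by default
#   checksum("/etc/hosts", hash="md5")         # algorithm given by name
#   checksum("/etc/hosts", hash=hashlib.sha1)  # or as a hashlib constructor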
def copy(source, destination, ignore=None, overwrite=True, fatal=True, logger=UNSET, dryrun=UNSET):
"""Copy source -> destination
Args:
source (str | Path | None): Source file or folder
destination (str | Path | None): Destination file or folder
ignore (callable | list | str | None): Names to be ignored
overwrite (bool | None): True: replace existing, False: fail if destination exists, None: no destination check
fatal (type | bool | None): True: abort execution on failure, False: don't abort but log, None: don't abort, don't log
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
dryrun (bool): Optionally override current dryrun setting
Returns:
(int): In non-fatal mode, 1: successfully done, 0: was no-op, -1: failed
"""
return _file_op(source, destination, _copy, overwrite, fatal, logger, dryrun, ignore=ignore)
def delete(path, fatal=True, logger=UNSET, dryrun=UNSET):
"""
Args:
path (str | Path | None): Path to file or folder to delete
fatal (type | bool | None): True: abort execution on failure, False: don't abort but log, None: don't abort, don't log
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
dryrun (bool): Optionally override current dryrun setting
Returns:
(int): In non-fatal mode, 1: successfully done, 0: was no-op, -1: failed
"""
path = resolved_path(path)
islink = path and os.path.islink(path)
if not islink and (not path or not os.path.exists(path)):
return 0
if _R.hdry(dryrun, logger, "delete %s" % short(path)):
return 1
try:
_do_delete(path, islink, fatal)
_R.hlog(logger, "Deleted %s" % short(path))
return 1
except Exception as e:
return abort("Can't delete %s" % short(path), exc_info=e, return_value=-1, fatal=fatal, logger=logger)
def ensure_folder(path, clean=False, fatal=True, logger=UNSET, dryrun=UNSET):
"""Ensure folder with 'path' exists
Args:
path (str | Path | None): Path to file or folder
clean (bool): If True, ensure folder is clean (delete any file/folder it may have)
fatal (type | bool | None): True: abort execution on failure, False: don't abort but log, None: don't abort, don't log
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
dryrun (bool): Optionally override current dryrun setting
Returns:
(int): In non-fatal mode, >=1: successfully done, 0: was no-op, -1: failed
"""
path = resolved_path(path)
if not path:
return 0
if os.path.isdir(path):
if not clean:
return 0
cleaned = 0
for fname in os.listdir(path):
cleaned += delete(os.path.join(path, fname), fatal=fatal, logger=None, dryrun=dryrun)
if cleaned:
msg = "%s from %s" % (_R.plural(cleaned, "file"), short(path))
if not _R.hdry(dryrun, logger, "clean %s" % msg):
_R.hlog(logger, "Cleaned %s" % msg)
return cleaned
if _R.hdry(dryrun, logger, "create %s" % short(path)):
return 1
try:
os.makedirs(path)
_R.hlog(logger, "Created folder %s" % short(path))
return 1
except Exception as e:
return abort("Can't create folder %s" % short(path), exc_info=e, return_value=-1, fatal=fatal, logger=logger)
def filesize(*paths, logger=False):
"""
Args:
*paths (str | Path | None): Paths to files/folders
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
Returns:
(int): File size in bytes
"""
size = 0
for path in flattened(paths, unique=True):
path = to_path(path)
if path and path.exists() and not path.is_symlink():
if path.is_dir():
for sf in path.iterdir():
size += filesize(sf)
elif path.is_file():
try:
size += path.stat().st_size
except Exception as e: # pragma: no cover, ignore cases like permission denied, file name too long, etc
_R.hlog(logger, "Can't stat %s: %s" % (short(path), short(e, size=32)))
return size
def ini_to_dict(path, keep_empty=False, fatal=False, logger=False):
"""Contents of an INI-style config file as a dict of dicts: section -> key -> value
Args:
path (str | Path | None): Path to file to parse
keep_empty (bool): If True, keep definitions with empty values
fatal (type | bool | None): True: abort execution on failure, False: don't abort but log, None: don't abort, don't log
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
Returns:
(dict): Dict of section -> key -> value
"""
result = {}
section_key = None
section = None
for line in readlines(path, fatal=fatal, logger=logger):
line, _, _ = line.partition("#")
line = line.strip()
if line:
if line.startswith("[") and line.endswith("]"):
section_key = line.strip("[]").strip()
section = result.get(section_key)
continue
if "=" not in line:
continue
if section is None:
section = result[section_key] = {}
key, _, value = line.partition("=")
key = key.strip()
value = value.strip()
if keep_empty or (key and value):
section[key] = value
if not keep_empty:
result = dict((k, v) for k, v in result.items() if k and v)
return result
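# For example (illustrative), given a file containing:
#
#   [server]
#   host = example.com  # inline comments are stripped
#   port = 8080
#
# ini_to_dict(path) returns {"server": {"host": "example.com", "port": "8080"}}
# (values are kept as strings; no type coercion is attempted).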
def is_younger(path, age, default=False):
"""
Args:
path (str | Path): Path to file
age (int | float | None): How many seconds to consider the file too old
default (bool): Returned when file is not present
Returns:
(bool): True if file exists and is younger than 'age' seconds
"""
try:
if age > 0:
return time.time() - os.path.getmtime(path) < age
except (OSError, IOError, TypeError):
return default
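# Example usage (illustrative):
#   is_younger("/tmp/cache.json", age=3600)             # True if modified within the last hour
#   is_younger("no-such-file", age=3600)                # False (the `default`)
#   is_younger("no-such-file", age=3600, default=True)  # True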
def ls_dir(path):
"""A --dryrun friendly version of Path.iterdir
Args:
path (str | Path | None): Path to folder
Yields:
(Path): Sub-folders / files, if any
"""
path = to_path(path)
if path and path.is_dir():
yield from path.iterdir()
def parent_folder(path, base=None):
"""Parent folder of `path`, relative to `base`
Args:
path (str | Path | None): Path to file or folder
base (str | None): Base folder to use for relative paths (default: current working dir)
Returns:
(str): Absolute path of parent folder
"""
return path and os.path.dirname(resolved_path(path, base=base))
def readlines(path, first=None, errors="ignore", fatal=False, logger=False):
"""
Args:
path (str | Path | None): Path to file to read lines from
first (int | None): Return only the 'first' lines when specified
errors (str | None): Optional string specifying how encoding errors are to be handled
fatal (type | bool | None): True: abort execution on failure, False: don't abort but log, None: don't abort, don't log
logger (callable | bool | None): Logger to use, True to print(), False to trace(), None to disable log chatter
Yields:
(str): Lines read, newlines and trailing spaces stripped
"""
try:
with io.open(resolved_path(path), errors=errors) as fh:
if not first:
first = -1
for line in fh:
if first == 0:
return
yield decode(line).rstrip()
first -= 1
except Exception as e:
message = "Can't read %s" % short(path)
if fatal:
abort(_R.actual_message(message), exc_info=e, fatal=fatal, logger=logger)
_R.hlog(logger, message, exc_info=e)
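# Example usage (illustrative):
#   for line in readlines("setup.py", first=10):
#       print(line)  # at most the first 10 lines, newlines and trailing spaces stripped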
def to_path(path, no_spaces=False):
"""
Args:
path (str | Path): Path to convert
no_spaces (type | bool | None): If True-ish, abort if 'path' contains a space
Returns:
(Path | None): Converted to `Path` object, if necessary
"""
if no_spaces and " " in str(path):
abort("Refusing path with space (not worth escaping all the things to make this work): '%s'" % short(path), fatal=no_spaces)
if isinstance(path, Path):
return path
if path is not None:
if path:
path = os.path.expanduser(path)
return Path(path)
def move(source, destination, overwrite=True, fatal=True, logger=UNSET, dryrun=UNSET):
"""Move `source` -> `destination`
Args:
source (str | Path | None): Source file or folder
destination
#
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
from java.util.concurrent import *
from java.util import *
from java.lang import *
from java.net import *
from java.sql import *
from multiverse.mars import *
from multiverse.mars.core import *
from multiverse.mars.objects import *
from multiverse.mars.util import *
from multiverse.mars.plugins import *
from multiverse.server.math import *
from multiverse.server.plugins import *
from multiverse.server.events import *
from multiverse.server.objects import *
from multiverse.server.engine import *
from multiverse.server.util import *
import time
import sys
driverName = "com.mysql.jdbc.Driver"
Class.forName(driverName)
# photo storage
places_url = "http://places.multiverse.net/"
# host running web database
webdb_host = "webdb.mv-places.com"
# for testing
#webdb_host = "localhost"
ProxyPlugin.MaxConcurrentUsers = 400
ROOM_PLAYER_LIMIT = 50
maxUsersProp = Engine.getProperty("places.max_concurrent_users")
if maxUsersProp != None:
ProxyPlugin.MaxConcurrentUsers = int(maxUsersProp)
roomLimitProp = Engine.getProperty("places.room_player_limit")
if roomLimitProp != None:
ROOM_PLAYER_LIMIT = int(roomLimitProp)
AGENT_NAME = Engine.getAgent().getName()
TOKEN_LIFE = 30000 # 30 seconds after which the token expires
def getDomainHost():
hostName = Engine.getMessageServerHostname()
if hostName == 'localhost':
try:
localMachine = InetAddress.getLocalHost()
hostName = localMachine.getHostName()
except UnknownHostException:
Log.error("getDomainHost: couldn't get host name from local IP address %s" % str(localMachine))
Log.debug("getDomainHost: hostname = %s" % hostName)
return hostName
domainHostName = getDomainHost()
class SetMeshCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
playerOid = cmdEvent.getObjectOid()
meshstring = cmd[cmd.index(' ')+1:]
submeshes = LinkedList()
meshlist = meshstring.split()
basemesh = meshlist[0]
for i in range(1, len(meshlist)-1, 2):
submesh = DisplayContext.Submesh(meshlist[i], meshlist[i+1])
submeshes.add(submesh)
Log.debug("/setmesh: oid=" + str(playerOid) + " to: " + meshstring)
WorldManagerClient.modifyDisplayContext(playerOid, WorldManagerClient.ModifyDisplayContextAction.REPLACE, basemesh, submeshes)
class PlayAnimationCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
playerOid = cmdEvent.getObjectOid()
animation = cmd[cmd.index(' ')+1:]
Log.debug("/playanimation: oid=" + str(playerOid) + " with: " + animation);
AnimationClient.playSingleAnimation(playerOid, animation)
class DanceCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
args = cmd.split()
playerOid = cmdEvent.getObjectOid()
Log.debug("/dance: oid=" + str(playerOid))
if len(args) == 1:
currentDanceState = EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "dancestate")
newDanceState = 0
if currentDanceState == 0:
rand = Random()
newDanceState = int(rand.nextInt(6)) + 1
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "dancestate", newDanceState)
elif len(args) == 2:
if args[1] == "on":
newDanceState = int(rand.nextInt(6)) + 1
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "dancestate", newDanceState)
elif args[1] == "off" or args[1] == "0":
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "dancestate", 0)
else:
try:
newDanceState = int(args[1])
if newDanceState >= 1 and newDanceState <= 6:
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "dancestate", newDanceState)
except:
pass
class GestureCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
args = cmd.split()
playerOid = cmdEvent.getObjectOid()
Log.debug("/gesture: oid=" + str(playerOid))
if len(args) == 1:
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "gesturestate", Boolean(not EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "gesturestate")))
elif len(args) == 2:
if args[1] == "on":
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "gesturestate", Boolean(True))
if args[1] == "off":
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "gesturestate", Boolean(False))
sitList = {
'low' : 'ntrl_sit_50cm',
'med' : 'ntrl_sit_75cm',
'high' : 'ntrl_sit_85cm',
'1' : 'ntrl_sit_50cm_attd_01_idle_01',
'2' : 'ntrl_sit_50cm_attd_02_idle_01',
'3' : 'ntrl_sit_50cm_attd_03_idle_01',
}
class SitCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
args = cmd.split()
playerOid = cmdEvent.getObjectOid()
Log.debug("/sit: oid=" + str(playerOid))
if len(args) == 1:
Log.debug("/sit: oid=" + str(playerOid))
if (not WorldManagerClient.getObjectProperty(playerOid, "sitstate")):
AnimationClient.playSingleAnimation(playerOid, "sit") # stand to sit
else:
# AnimationClient.playSingleAnimation(playerOid, "stand") # sit to stand
pass
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate", Boolean(not EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate")))
elif len(args) == 2:
sitStyle = args[1]
Log.debug("/sit: oid=" + str(playerOid) + ", sit style=" + sitStyle)
if sitStyle == "on":
AnimationClient.playSingleAnimation(playerOid, "sit") # stand to sit
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate", Boolean(True))
return
elif sitStyle == "off":
# AnimationClient.playSingleAnimation(playerOid, "stand") # sit to stand
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate", Boolean(False))
return
animName = 'sit'
if sitStyle in sitList.keys():
animName = sitList[sitStyle]
if (not WorldManagerClient.getObjectProperty(playerOid, "sitstate")):
AnimationClient.playSingleAnimation(playerOid, animName) # stand to sit
else:
# AnimationClient.playSingleAnimation(playerOid, "stand") # sit to stand
pass
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate", Boolean(not EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "sitstate")))
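# Example chat usage (hypothetical), as handled by the parser above:
#   /sit        -> toggles 'sitstate', playing the default "sit" animation
#   /sit high   -> plays 'ntrl_sit_85cm' from sitList
#   /sit off    -> clears 'sitstate' without playing an animation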
class GMCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
args = cmd.split()
playerOid = cmdEvent.getObjectOid()
accountId = EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "AccountId")
if isAdmin(accountId):
Log.debug("/gmmode: oid=" + str(playerOid))
gmMode = EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "GMMode")
if gmMode == None:
gmMode = False
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, "GMMode", Boolean(not gmMode))
class PropertyCommand(ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
cmd = cmdEvent.getCommand()
args = cmd.split()
if len(args) == 3:
playerOid = cmdEvent.getObjectOid()
Log.debug("/property: oid=" + str(playerOid) + " " + args[1] + " " + args[2])
propName = args[1]
propValue = args[2]
EnginePlugin.setObjectProperty(playerOid, Namespace.WORLD_MANAGER, propName, propValue)
if len(args) == 2:
playerOid = cmdEvent.getObjectOid()
Log.debug("/property: oid=" + str(playerOid) + " " + args[1])
propName = args[1]
propValue = EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, propName)
WorldManagerClient.sendObjChatMsg(playerOid, 0, str(propValue))
class IgnoreCommand(ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
player = proxyPlugin.getPlayer(playerOid)
cmd = cmdEvent.getCommand()
args = cmd.split()
Log.debug("/ignore: oid=%s; cmd=%s; args=%s" % (str(playerOid), cmd, args))
# Test for 2+ args but only ignore the first.
# Additional args may be first name, last name, etc.,
# for greater ignore granularity in the future.
if len(args) >= 2:
result = proxyPlugin.matchingPlayers(player, args[1], True)
if result is not None:
oids = result[0]
if oids is not None and oids.size() > 0:
if playerOid in oids: # can't ignore self
# This is ugly, but remove(playerOid) doesn't
# work (playerOid is treated as an index), and
# indexOf(playerOid) returns -1.
for i in range(len(oids)):
if playerOid == oids[i]:
oids.remove(i)
break
# Make sure removing playerOid didn't empty the list.
if oids.size() > 0:
proxyPlugin.updateIgnoredOids(player, oids, None)
WorldManagerClient.sendObjChatMsg(playerOid, 0, "You are now ignoring all characters named %s." % args[1])
else:
WorldManagerClient.sendObjChatMsg(playerOid, 0, "No matches found for %s." % args[1])
else:
WorldManagerClient.sendObjChatMsg(playerOid, 0, "No matches found for %s." % args[1])
else:
WorldManagerClient.sendObjChatMsg(playerOid, 0, "Usage: /ignore playername")
#
# places specific /sys command
# determine admin status of caller, then calls into common/proxy.py
#
class FRW_SysCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
accountId = EnginePlugin.getObjectProperty(playerOid, Namespace.WORLD_MANAGER, "AccountId")
if isAdmin(accountId):
handleSysCommand(cmdEvent)
proxyPlugin.registerCommand("/setmesh", SetMeshCommand())
proxyPlugin.registerCommand("/playanimation", PlayAnimationCommand())
proxyPlugin.registerCommand("/dance", DanceCommand())
proxyPlugin.registerCommand("/gesture", GestureCommand())
proxyPlugin.registerCommand("/sit", SitCommand())
proxyPlugin.registerCommand("/gmmode", GMCommand())
proxyPlugin.registerCommand("/property", PropertyCommand())
proxyPlugin.registerCommand("/ignore", IgnoreCommand())
proxyPlugin.registerCommand("/sys", FRW_SysCommand())
class YesCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/yes: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_nod")
class NoCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/no: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_headshake")
class ShrugCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/shrug: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_shrug")
class LaughCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/laugh: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_laugh")
class WaveCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/wave: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_wave")
class BowCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/bow: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_bow")
class PointCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/point: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_point")
class ClapCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/clap: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_clap")
class CheerCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
Log.debug("/cheer: oid=" + str(playerOid))
AnimationClient.playSingleAnimation(playerOid, "ntrl_cheer")
class AttitudeCommand (ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
cmd = cmdEvent.getCommand()
args = cmd.split()
animNum = None
if len(args) > 1:
try:
animNum = int(args[1])
except:
animNum = 1
else:
animNum = 1
if animNum > 3:
animNum = 1
Log.debug("/attitude: oid= %s; cmd=%s" % (str(playerOid), cmd))
AnimationClient.playSingleAnimation(playerOid, "ntrl_attd_%02d_idle_01" % animNum)
class SetTVUrlCommand(ProxyPlugin.CommandParser):
def parse(self, cmdEvent):
playerOid = cmdEvent.getObjectOid()
tvOid = cmdEvent.getTarget()
cmd = cmdEvent.getCommand()
splitCmd = cmd.split(" ")
url = splitCmd[1]
if url != None and (url.startswith("http://") or url.startswith("mms://")):
WorldManagerClient.setObjectProperty(tvOid,"tv_url", url)
WorldManagerClient.sendObjChatMsg(playerOid, 0, "TV set to: " + url)
else:
WorldManagerClient.sendObjChatMsg(playerOid, 0, "Please include http:// or mms:// in the address")
proxyPlugin.registerCommand("/yes", YesCommand())
proxyPlugin.registerCommand("/no", NoCommand())
proxyPlugin.registerCommand("/shrug", ShrugCommand())
proxyPlugin.registerCommand("/laugh", LaughCommand())
proxyPlugin.registerCommand("/wave", WaveCommand())
proxyPlugin.registerCommand("/bow", BowCommand())
proxyPlugin.registerCommand("/point", PointCommand())
proxyPlugin.registerCommand("/clap", ClapCommand())
proxyPlugin.registerCommand("/cheer", CheerCommand())
proxyPlugin.registerCommand("/attitude", AttitudeCommand())
proxyPlugin.registerCommand("/attd", AttitudeCommand())
proxyPlugin.registerCommand("/settvurl", SetTVUrlCommand())
def instanceSetObjectProperty(instanceOid, oid, namespace, key, value):
props = EnginePlugin.getObjectProperty(instanceOid, Namespace.INSTANCE, "RoomItemsProps")
objInfo
# -*- coding: utf-8 -*-
# AUTHOR: <NAME>
# DATE: 19-9-19
import logging
import os
import torch.optim as optim
import torch.distributed as dist
from itertools import product
from .dee_helper import logger, DEEExample, DEEExampleLoader, DEEFeatureConverter, \
convert_dee_features_to_dataset, prepare_doc_batch_dict, measure_dee_prediction, \
decode_dump_template, eval_dump_template
from .utils import BERTChineseCharacterTokenizer, default_dump_json, default_load_pkl
from .ner_model import BertForBasicNER
from .base_task import TaskSetting, BasePytorchTask
from .event_type import event_type_fields_list
from .dee_model import Doc2EDAGModel, DCFEEModel
class DEETaskSetting(TaskSetting):
base_key_attrs = TaskSetting.base_key_attrs
base_attr_default_pairs = [
('train_file_name', 'Doc2EDAG_train_simple.json'),
('dev_file_name', 'Doc2EDAG_dev.json'),
('test_file_name', 'Doc2EDAG_test.json'),
('summary_dir_name', '/tmp/Summary'),
('max_sent_len', 128),
('max_sent_num', 64),
('train_batch_size', 10),
('gradient_accumulation_steps', 8),
('eval_batch_size', 2),
('learning_rate', 1e-4),
('num_train_epochs', 100),
('no_cuda', False),
('local_rank', -1),
('seed', 99),
('optimize_on_cpu', False),
('fp16', False),
('use_bert', False), # whether to use bert as the encoder
('bert_model', 'bert-base-chinese'), # use which pretrained bert model
('only_master_logging', True), # whether to print logs from multiple processes
('resume_latest_cpt', True), # whether to resume latest checkpoints when training for fault tolerance
('cpt_file_name', 'DCFEE'), # decide the identity of checkpoints, evaluation results, etc.
('model_type', 'DCFEE'), # decide the model class used
('rearrange_sent', False), # whether to rearrange sentences
('use_crf_layer', True), # whether to use CRF Layer
('min_teacher_prob', 0.1), # the minimum prob to use gold spans
('schedule_epoch_start', 10), # from which epoch the scheduled sampling starts
('schedule_epoch_length', 10), # the number of epochs to linearly transit to the min_teacher_prob
('loss_lambda', 0.05), # the proportion of ner loss
('loss_gamma', 1.0), # the scaling proportion of missed span sentence ner loss
('add_greedy_dec', True), # whether to add additional greedy decoding
('use_token_role', True), # whether to use detailed token role
('seq_reduce_type', 'MaxPooling'), # use 'MaxPooling', 'MeanPooling' or 'AWA' to reduce a tensor sequence
# network parameters (follow Bert Base)
('hidden_size', 768),
('dropout', 0.1),
('ff_size', 1024), # feed-forward mid layer size
('num_tf_layers', 4), # transformer layer number
# ablation study parameters,
('use_path_mem', True), # whether to use the memory module when expanding paths
('use_scheduled_sampling', True), # whether to use the scheduled sampling
('use_doc_enc', True), # whether to use document-level entity encoding
('neg_field_loss_scaling', 3.0), # prefer FNs over FPs
]
def __init__(self, **kwargs):
super(DEETaskSetting, self).__init__(
self.base_key_attrs, self.base_attr_default_pairs, **kwargs
)
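# For illustration, a setting for the full Doc2EDAG model could be built like
# this (keyword values are examples; `base_dir_kwargs` is a hypothetical dict
# supplying TaskSetting's base key attributes, e.g. data/model/output dirs):
#
#   dee_setting = DEETaskSetting(
#       model_type='Doc2EDAG', cpt_file_name='Doc2EDAG',
#       train_batch_size=10, gradient_accumulation_steps=8,
#       **base_dir_kwargs,
#   )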
class DEETask(BasePytorchTask):
"""Doc-level Event Extraction Task"""
def __init__(self, dee_setting, load_train=True, load_dev=True, load_test=True,
parallel_decorate=True):
super(DEETask, self).__init__(dee_setting, only_master_logging=dee_setting.only_master_logging)
self.logger = logging.getLogger(self.__class__.__name__)
self.logging('Initializing {}'.format(self.__class__.__name__))
self.tokenizer = BERTChineseCharacterTokenizer.from_pretrained(self.setting.bert_model)
self.setting.vocab_size = len(self.tokenizer.vocab)
# get entity and event label name
self.entity_label_list = DEEExample.get_entity_label_list()
self.event_type_fields_pairs = DEEExample.get_event_type_fields_pairs()
# build example loader
self.example_loader_func = DEEExampleLoader(self.setting.rearrange_sent, self.setting.max_sent_len)
# build feature converter
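# (the BERT encoder expects the pretrained model's special [CLS]/[SEP] tokens,
# while the plain transformer encoder does not, hence the include_cls/include_sep toggle)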
if self.setting.use_bert:
self.feature_converter_func = DEEFeatureConverter(
self.entity_label_list, self.event_type_fields_pairs,
self.setting.max_sent_len, self.setting.max_sent_num, self.tokenizer,
include_cls=True, include_sep=True,
)
else:
self.feature_converter_func = DEEFeatureConverter(
self.entity_label_list, self.event_type_fields_pairs,
self.setting.max_sent_len, self.setting.max_sent_num, self.tokenizer,
include_cls=False, include_sep=False,
)
# load data
self._load_data(
self.example_loader_func, self.feature_converter_func, convert_dee_features_to_dataset,
load_train=load_train, load_dev=load_dev, load_test=load_test,
)
# customized mini-batch producer
self.custom_collate_fn = prepare_doc_batch_dict
if not self.setting.use_token_role:
# no token role conflicts with some settings
assert self.setting.model_type == 'Doc2EDAG'
assert self.setting.add_greedy_dec is False
self.setting.num_entity_labels = 3 # 0: 'O', 1: 'Begin', 2: 'Inside'
else:
self.setting.num_entity_labels = len(self.entity_label_list)
if self.setting.use_bert:
ner_model = BertForBasicNER.from_pretrained(
self.setting.bert_model, num_entity_labels=self.setting.num_entity_labels
)
self.setting.update_by_dict(ner_model.config.__dict__) # BertConfig dictionary
# substitute pooler in bert to support distributed training
# because unused parameters will cause errors when conducting distributed all_reduce
class PseudoPooler(object):
def __init__(self):
pass
def __call__(self, *x):
return x
del ner_model.bert.pooler
ner_model.bert.pooler = PseudoPooler()
else:
ner_model = None
if self.setting.model_type == 'Doc2EDAG':
self.model = Doc2EDAGModel(
self.setting, self.event_type_fields_pairs, ner_model=ner_model,
)
elif self.setting.model_type == 'DCFEE':
self.model = DCFEEModel(
self.setting, self.event_type_fields_pairs, ner_model=ner_model
)
else:
raise Exception('Unsupported model type {}'.format(self.setting.model_type))
self._decorate_model(parallel_decorate=parallel_decorate)
# prepare optimizer
self.optimizer = optim.Adam(self.model.parameters(), lr=self.setting.learning_rate)
# # resume option
# if resume_model or resume_optimizer:
# self.resume_checkpoint(resume_model=resume_model, resume_optimizer=resume_optimizer)
self.min_teacher_prob = None
self.teacher_norm = None
self.teacher_cnt = None
self.teacher_base = None
self.reset_teacher_prob()
self.logging('Successfully initialized {}'.format(self.__class__.__name__))
def reset_teacher_prob(self):
self.min_teacher_prob = self.setting.min_teacher_prob
if self.train_dataset is None:
# avoid crashing when not loading training data
num_step_per_epoch = 500
else:
num_step_per_epoch = int(len(self.train_dataset) / self.setting.train_batch_size)
self.teacher_norm = num_step_per_epoch * self.setting.schedule_epoch_length
self.teacher_base = num_step_per_epoch * self.setting.schedule_epoch_start
self.teacher_cnt = 0
def get_teacher_prob(self, batch_inc_flag=True):
if self.teacher_cnt < self.teacher_base:
prob = 1
else:
prob = max(
self.min_teacher_prob, (self.teacher_norm - self.teacher_cnt + self.teacher_base) / self.teacher_norm
)
if batch_inc_flag:
self.teacher_cnt += 1
return prob
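# NOTE on the schedule above: gold spans are always used for the first `teacher_base`
# batches, then the probability decays linearly over the next `teacher_norm` batches
# until it is clamped at `min_teacher_prob`. E.g. with 500 steps per epoch and the
# defaults (schedule_epoch_start=10, schedule_epoch_length=10), prob stays 1 for the
# first 5000 batches and reaches the minimum around batch 10000.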
def get_event_idx2entity_idx2field_idx(self):
entity_idx2entity_type = {}
for entity_idx, entity_label in enumerate(self.entity_label_list):
if entity_label == 'O':
entity_type = entity_label
else:
entity_type = entity_label[2:]
entity_idx2entity_type[entity_idx] = entity_type
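# e.g. a BIO-style label such as 'B-SomeType' (hypothetical) yields entity type
# 'SomeType'; the '[2:]' strips the 'B-'/'I-' prefix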
event_idx2entity_idx2field_idx = {}
for event_idx, (event_name, field_types) in enumerate(self.event_type_fields_pairs):
field_type2field_idx = {}
for field_idx, field_type in enumerate(field_types):
field_type2field_idx[field_type] = field_idx
entity_idx2field_idx = {}
for entity_idx, entity_type in entity_idx2entity_type.items():
if entity_type in field_type2field_idx:
entity_idx2field_idx[entity_idx] = field_type2field_idx[entity_type]
else:
entity_idx2field_idx[entity_idx] = None
event_idx2entity_idx2field_idx[event_idx] = entity_idx2field_idx
return event_idx2entity_idx2field_idx
def get_loss_on_batch(self, doc_batch_dict, features=None):
if features is None:
features = self.train_features
# teacher_prob = 1
# if use_gold_span, gold spans will be used every time
# else, teacher_prob will ensure the proportion of using gold spans
if self.setting.use_scheduled_sampling:
use_gold_span = False
teacher_prob = self.get_teacher_prob()
else:
use_gold_span = True
teacher_prob = 1
try:
loss = self.model(
doc_batch_dict, features, use_gold_span=use_gold_span, train_flag=True, teacher_prob=teacher_prob
)
except Exception as e:
print('-'*30)
print('Exception occurred when processing ' +
','.join([features[ex_idx].guid for ex_idx in doc_batch_dict['ex_idx']]))
raise Exception('Cannot get the loss') from e
return loss
def get_event_decode_result_on_batch(self, doc_batch_dict, features=None, use_gold_span=False, heuristic_type=None):
if features is None:
raise Exception('Features must be provided')
if heuristic_type is None:
event_idx2entity_idx2field_idx = None
else:
# this mapping is used to get span candidates for each event field
event_idx2entity_idx2field_idx = self.get_event_idx2entity_idx2field_idx()
batch_eval_results = self.model(
doc_batch_dict, features, use_gold_span=use_gold_span, train_flag=False,
event_idx2entity_idx2field_idx=event_idx2entity_idx2field_idx, heuristic_type=heuristic_type,
)
return batch_eval_results
def train(self, save_cpt_flag=True, resume_base_epoch=None):
self.logging('=' * 20 + 'Start Training' + '=' * 20)
self.reset_teacher_prob()
# the resume_base_epoch argument has higher priority than the settings
if resume_base_epoch is None:
# whether to resume latest cpt when restarting, very useful for preemptive scheduling clusters
if self.setting.resume_latest_cpt:
resume_base_epoch = self.get_latest_cpt_epoch()
else:
resume_base_epoch = 0
# resume cpt if possible
if resume_base_epoch > 0:
self.logging('Training starts from epoch {}'.format(resume_base_epoch))
for _ in range(resume_base_epoch):
self.get_teacher_prob()
self.resume_cpt_at(resume_base_epoch, resume_model=True, resume_optimizer=True)
else:
self.logging('Training starts from scratch')
self.base_train(
DEETask.get_loss_on_batch,
kwargs_dict1={},
epoch_eval_func=DEETask.resume_save_eval_at,
kwargs_dict2={
'save_cpt_flag': save_cpt_flag,
'resume_cpt_flag': False,
},
base_epoch_idx=resume_base_epoch,
)
def resume_save_eval_at(self, epoch, resume_cpt_flag=False, save_cpt_flag=True):
if self.is_master_node():
print('\nPROGRESS: {:.2f}%\n'.format(epoch / self.setting.num_train_epochs * 100))
self.logging('Current teacher prob {}'.format(self.get_teacher_prob(batch_inc_flag=False)))
if resume_cpt_flag:
self.resume_cpt_at(epoch)
if self.is_master_node() and save_cpt_flag:
self.save_cpt_at(epoch)
if self.setting.model_type == 'DCFEE':
eval_tasks = product(['dev', 'test'], [False, True], ['DCFEE-O', 'DCFEE-M'])
else:
if self.setting.add_greedy_dec:
eval_tasks = product(['dev', 'test'], [False, True], ['GreedyDec', None])
else:
eval_tasks = product(['dev', 'test'], [False, True], [None])
for task_idx, (data_type, gold_span_flag, heuristic_type) in enumerate(eval_tasks):
if self.in_distributed_mode() and task_idx % dist.get_world_size() != dist.get_rank():
continue
if data_type == 'test':
features = self.test_features
dataset = self.test_dataset
elif data_type == 'dev':
features = self.dev_features
dataset = self.dev_dataset
else:
raise Exception('Unsupported data type {}'.format(data_type))
if gold_span_flag:
span_str = 'gold_span'
else:
span_str = 'pred_span'
if heuristic_type is None:
# store user-provided name
model_str = self.setting.cpt_file_name.replace('.', '~')
else:
model_str = heuristic_type
decode_dump_name = decode_dump_template.format(data_type, span_str, model_str, epoch)
eval_dump_name = eval_dump_template.format(data_type, span_str, model_str, epoch)
self.eval(features, dataset, use_gold_span=gold_span_flag, heuristic_type=heuristic_type,
dump_decode_pkl_name=decode_dump_name, dump_eval_json_name=eval_dump_name)
def save_cpt_at(self, epoch):
self.save_checkpoint(cpt_file_name='{}.cpt.{}'.format(self.setting.cpt_file_name, epoch), epoch=epoch)
def resume_cpt_at(self, epoch, resume_model=True, resume_optimizer=False):
self.resume_checkpoint(cpt_file_name='{}.cpt.{}'.format(self.setting.cpt_file_name, epoch),
resume_model=resume_model, resume_optimizer=resume_optimizer)
def get_latest_cpt_epoch(self):
prev_epochs = []
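# checkpoint files are named '{cpt_file_name}.cpt.{epoch}' (see save_cpt_at),
# so the component after the last '.' is the epoch number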
for fn in os.listdir(self.setting.model_dir):
if fn.startswith('{}.cpt'.format(self.setting.cpt_file_name)):
try:
epoch = int(fn.split('.')[-1])
prev_epochs.append(epoch)
except Exception as e:
continue
prev_epochs.sort()
if len(prev_epochs) > 0:
latest_epoch = prev_epochs[-1]
self.logging('Pick latest epoch {} from {}'.format(latest_epoch, str(prev_epochs)))
else:
latest_epoch = 0
self.logging('No previous epoch checkpoints, just start from scratch')
return latest_epoch
def eval(self, features, dataset, use_gold_span=False, heuristic_type=None,
dump_decode_pkl_name=None, dump_eval_json_name=None):
self.logging('=' * 20 + 'Start Evaluation' + '=' * 20)
if dump_decode_pkl_name is not None:
dump_decode_pkl_path = os.path.join(self.setting.output_dir, dump_decode_pkl_name)
self.logging('Dumping decode results into {}'.format(dump_decode_pkl_name))
else:
dump_decode_pkl_path = None
total_event_decode_results = self.base_eval(
dataset, DEETask.get_event_decode_result_on_batch,
reduce_info_type='none', dump_pkl_path=dump_decode_pkl_path,
features=features, use_gold_span=use_gold_span, heuristic_type=heuristic_type,
)
self.logging('Measure DEE Prediction')
if dump_eval_json_name is not None:
dump_eval_json_path = os.path.join(self.setting.output_dir, dump_eval_json_name)
self.logging('Dumping eval results into {}'.format(dump_eval_json_name))
else:
dump_eval_json_path = None
total_eval_res = measure_dee_prediction(
self.event_type_fields_pairs, features, total_event_decode_results,
dump_json_path=dump_eval_json_path
)
return total_event_decode_results, total_eval_res
def reevaluate_dee_prediction(self, target_file_pre='dee_eval', target_file_suffix='.pkl',
dump_flag=False):
"""Enumerate the evaluation directory to collect all dumped evaluation results"""
eval_dir_path = self.setting.output_dir
logger.info('Re-evaluate dee predictions from {}'.format(eval_dir_path))
data_span_type2model_str2epoch_res_list = {}
for fn in os.listdir(eval_dir_path):
fn_splits = fn.split('.')
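# dumped evaluation files have the form
# '<prefix>.<data_type>.<span_type>.<model_str>.<epoch>.<suffix>',
# i.e. exactly 6 dot-separated parts, matching the unpacking below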
if fn.startswith(target_file_pre) and fn.endswith(target_file_suffix) and len(fn_splits) == 6:
_, data_type, span_type, model_str, epoch, _ = fn_splits
data_span_type = (data_type, span_type)
if data_span_type not in data_span_type2model_str2epoch_res_list:
data_span_type2model_str2epoch_res_list[data_span_type] = {}
model_str2epoch_res_list = data_span_type2model_str2epoch_res_list[data_span_type]
if model_str
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""Provides a non-parametric two-stage least squares instrumental variable estimator."""
import numpy as np
from copy import deepcopy
from sklearn import clone
from sklearn.linear_model import LinearRegression
from ...utilities import (shape, transpose, reshape, cross_product, ndim, size,
_deprecate_positional, check_input_arrays)
from ..._cate_estimator import BaseCateEstimator, LinearCateEstimator
from numpy.polynomial.hermite_e import hermeval
from sklearn.base import TransformerMixin
from sklearn.preprocessing import PolynomialFeatures
from itertools import product
class HermiteFeatures(TransformerMixin):
"""
Featurizer that returns (unscaled) Hermite function evaluations.
The evaluated functions are of degrees 0..`degree`, differentiated `shift` times.
If the input has shape (n, x) and `joint` is False, the output will have shape (n, (`degree` + 1)×x) if `shift` is 0.
If the input has shape (n, x) and `joint` is True, the output will have shape (n, (`degree` + 1)^x) if `shift` is 0.
In either case, if `shift` is nonzero there will be `shift` additional dimensions of size x
between the first and last.
"""
def __init__(self, degree, shift=0, joint=False):
self._degree = degree
self._shift = shift
self._joint = joint
def _column_feats(self, X, shift):
"""
Apply Hermite function evaluations of degrees 0..`degree` differentiated `shift` times.
When applied to the column `X` of shape (n,), the resulting array has shape (n, degree + 1).
"""
assert ndim(X) == 1
# this will have dimension (d,) + shape(X)
coeffs = np.identity(self._degree + shift + 1)[:, shift:]
feats = ((-1) ** shift) * hermeval(X, coeffs) * np.exp(-X * X / 2)
# send the first dimension to the end
return transpose(feats)
def fit(self, X):
"""Fits the data(a NOP for this class) and returns self."""
return self
def transform(self, X):
"""
Transform the data by applying the appropriate Hermite functions.
Parameters
----------
X: array_like
2-dimensional array of input features
Returns
-------
The transformed data
"""
assert ndim(X) == 2
n = shape(X)[0]
ncols = shape(X)[1]
columns = []
for indices in product(*[range(ncols) for i in range(self._shift)]):
if self._joint:
columns.append(cross_product(*[self._column_feats(X[:, i], indices.count(i))
for i in range(shape(X)[1])]))
else:
indices = set(indices)
if self._shift == 0: # return features for all columns:
columns.append(np.hstack([self._column_feats(X[:, i], self._shift) for i in range(shape(X)[1])]))
# columns are featurized independently; partial derivatives are only non-zero
# when taken with respect to the same column each time
elif len(indices) == 1:
index = list(indices)[0]
feats = self._column_feats(X[:, index], self._shift)
columns.append(np.hstack([feats if i == index else np.zeros(shape(feats))
for i in range(shape(X)[1])]))
else:
columns.append(np.zeros((n, (self._degree + 1) * ncols)))
return reshape(np.hstack(columns), (n,) + (ncols,) * self._shift + (-1,))
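# A minimal usage sketch for HermiteFeatures (illustrative only, not from the original source):
#   hf = HermiteFeatures(degree=2)              # shift=0, joint=False
#   feats = hf.fit_transform(np.zeros((5, 2)))  # input shape (n, x) = (5, 2)
#   assert feats.shape == (5, 6)                # (degree + 1) * x = 3 * 2 columns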
class DPolynomialFeatures(TransformerMixin):
"""
Featurizer that returns the derivatives of :class:`~sklearn.preprocessing.PolynomialFeatures` features in
a way that's compatible with the expectations of :class:`.NonparametricTwoStageLeastSquares`'s
`dt_featurizer` parameter.
If the input has shape `(n, x)` and
:meth:`PolynomialFeatures.transform<sklearn.preprocessing.PolynomialFeatures.transform>` returns an output
of shape `(n, f)`, then :meth:`.transform` will return an array of shape `(n, x, f)`.
Parameters
----------
degree: integer, default = 2
The degree of the polynomial features.
interaction_only: boolean, default = False
If true, only derivatives of interaction features are produced: features that are products of at most degree
distinct input features (so not `x[1] ** 2`, `x[0] * x[2] ** 3`, etc.).
include_bias: boolean, default = True
If True (default), then include the derivative of a bias column, the feature in which all polynomial powers
are zero.
"""
def __init__(self, degree=2, interaction_only=False, include_bias=True):
self.F = PolynomialFeatures(degree=degree, interaction_only=interaction_only, include_bias=include_bias)
def fit(self, X, y=None):
"""
Fit the transformer (a no-op; the underlying PolynomialFeatures is fit during transform).
Parameters
----------
X : array-like, shape (n_samples, n_features)
The data.
y : array, optional
Not used
Returns
-------
self : instance
"""
return self
def transform(self, X):
"""
Transform data to derivatives of polynomial features
Parameters
----------
X: array-like, shape (n_samples, n_features)
The data to transform, row by row.
Returns
-------
XP: array-like, shape (n_samples, n_features, n_output_features)
The matrix of features, where `n_output_features` is the number of features that
would be returned from :class:`~sklearn.preprocessing.PolynomialFeatures`.
"""
self.F.fit(X)
powers = self.F.powers_
result = np.zeros(X.shape + (self.F.n_output_features_,))
for i in range(X.shape[1]):
p = powers.copy()
c = powers[:, i]
p[:, i] -= 1
M = np.float_power(X[:, np.newaxis, :], p[np.newaxis, :, :])
result[:, i, :] = c[np.newaxis, :] * np.prod(M, axis=-1)
return result
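# The loop in DPolynomialFeatures.transform above applies the power rule column-wise:
# for an output feature prod_j x_j^p_j, the derivative w.r.t. x_i is
# p_i * x_i^(p_i - 1) * prod_{j != i} x_j^p_j.
# E.g. with a single input feature and degree=2 the features are [1, x, x^2],
# so the returned derivatives are [0, 1, 2x].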
def _add_ones(arr):
"""Add a column of ones to the front of an array."""
return np.hstack([np.ones((shape(arr)[0], 1)), arr])
def _add_zeros(arr):
"""Add a column of zeros to the front of an array."""
return np.hstack([np.zeros((shape(arr)[0], 1)), arr])
class SieveTSLS(BaseCateEstimator):
"""
Non-parametric instrumental variables estimator.
Supports the use of arbitrary featurizers for the features, treatments, and instruments.
Parameters
----------
t_featurizer: transformer
Featurizer used to transform the treatments
x_featurizer: transformer
Featurizer used to transform the raw features
z_featurizer: transformer
Featurizer used to transform the instruments
dt_featurizer: transformer
Featurizer used to transform the treatments for the computation of the marginal effect.
This should produce a 3-dimensional array, containing the per-treatment derivative of
each transformed treatment. That is, given a treatment array of shape(n, dₜ),
the output should have shape(n, dₜ, fₜ), where fₜ is the number of columns produced by `t_featurizer`.
"""
def __init__(self, *,
t_featurizer,
x_featurizer,
z_featurizer,
dt_featurizer):
self._t_featurizer = clone(t_featurizer, safe=False)
self._x_featurizer = clone(x_featurizer, safe=False)
self._z_featurizer = clone(z_featurizer, safe=False)
self._dt_featurizer = clone(dt_featurizer, safe=False)
# don't fit intercept; manually add column of ones to the data instead;
# this allows us to ignore the intercept when computing marginal effects
self._model_T = LinearRegression(fit_intercept=False)
self._model_Y = LinearRegression(fit_intercept=False)
super().__init__()
@_deprecate_positional("X, W, and Z should be passed by keyword only. In a future release "
"we will disallow passing X, W, and Z by position.", ['X', 'W', 'Z'])
@BaseCateEstimator._wrap_fit
def fit(self, Y, T, X, W, Z, *, inference=None):
"""
Estimate the counterfactual model from data, i.e. estimates functions τ(·, ·, ·), ∂τ(·, ·).
Parameters
----------
Y: (n × d_y) matrix
Outcomes for each sample
T: (n × dₜ) matrix
Treatments for each sample
X: optional(n × dₓ) matrix
Features for each sample
W: optional(n × d_w) matrix
Controls for each sample
Z: optional(n × d_z) matrix
Instruments for each sample
inference: string, :class:`.Inference` instance, or None
Method for performing inference. This estimator supports 'bootstrap'
(or an instance of :class:`.BootstrapInference`)
Returns
-------
self
"""
Y, T, X, W, Z = check_input_arrays(Y, T, X, W, Z)
if X is None:
X = np.empty((shape(Y)[0], 0))
if W is None:
W = np.empty((shape(Y)[0], 0))
assert shape(Y)[0] == shape(T)[0] == shape(X)[0] == shape(W)[0] == shape(Z)[0]
# make T 2D if it was a vector
if ndim(T) == 1:
T = reshape(T, (-1, 1))
# store number of columns of W so that we can create correctly shaped zero array in effect and marginal effect
self._d_w = shape(W)[1]
# two stage approximation
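# (stage 1: regress featurized T on [1, W, cross_product(ft_X, ft_Z)];
#  stage 2: regress Y on [1, W, cross_product(ft_T_hat, ft_X)])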
# first, get basis expansions of T, X, and Z
ft_X = self._x_featurizer.fit_transform(X)
ft_Z = self._z_featurizer.fit_transform(Z)
ft_T = self._t_featurizer.fit_transform(T)
# TODO: is it right that the effective number of instruments is the
# product of ft_X and ft_Z, not just ft_Z?
assert shape(ft_T)[1] <= shape(ft_X)[1] * shape(ft_Z)[1], ("There can be no more T features than the product "
"of the number of X and Z features; otherwise "
"there is not enough information to identify their "
"structure")
# regress T expansion on X,Z expansions concatenated with W
features = _add_ones(np.hstack([W, cross_product(ft_X, ft_Z)]))
self._model_T.fit(features, ft_T)
# predict ft_T from interacted ft_X, ft_Z
ft_T_hat = self._model_T.predict(features)
self._model_Y.fit(_add_ones(np.hstack([W, cross_product(ft_T_hat, ft_X)])), Y)
def effect(self, X=None, T0=0, T1=1):
"""
Calculate the heterogeneous treatment effect τ(·,·,·).
The effect is calculated between the two treatment points
conditional on a vector of features on a set of m test samples {T0ᵢ, T1ᵢ, Xᵢ}.
Parameters
----------
T0: (m × dₜ) matrix or vector of length m
Base treatments for each sample
T1: (m × dₜ) matrix or vector of length m
Target treatments for each sample
X: optional (m × dₓ) matrix
Features for each sample
Returns
-------
τ: (m × d_y) matrix
Heterogeneous treatment effects on each outcome for each sample
Note that when Y is a vector rather than a 2-dimensional array, the corresponding
singleton dimension will be collapsed (so this method will return a vector)
"""
if ndim(T0) == 0:
T0 =
import math
import torch
import treetensor.torch as ttorch
from .base import choose_mark
# noinspection DuplicatedCode,PyUnresolvedReferences
class TestTorchFuncsMath:
@choose_mark()
def test_abs(self):
t1 = ttorch.abs(ttorch.tensor([12, 0, -3]))
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([12, 0, 3])).all()
t2 = ttorch.abs(ttorch.tensor({
'a': [12, 0, -3],
'b': {'x': [[-3, 1], [0, -2]]},
}))
assert (t2 == ttorch.tensor({
'a': [12, 0, 3],
'b': {'x': [[3, 1], [0, 2]]},
})).all()
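# NOTE: treetensor functions mirror torch: applied to a plain tensor they return
# a torch.Tensor, and applied to a dict-style tree they map over the leaves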
@choose_mark()
def test_abs_(self):
t1 = ttorch.tensor([12, 0, -3])
assert isinstance(t1, torch.Tensor)
t1r = ttorch.abs_(t1)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([12, 0, 3])).all()
t2 = ttorch.tensor({
'a': [12, 0, -3],
'b': {'x': [[-3, 1], [0, -2]]},
})
t2r = ttorch.abs_(t2)
assert t2r is t2
assert (t2 == ttorch.tensor({
'a': [12, 0, 3],
'b': {'x': [[3, 1], [0, 2]]},
})).all()
@choose_mark()
def test_clamp(self):
t1 = ttorch.clamp(ttorch.tensor([-1.7120, 0.1734, -0.0478, 2.0922]), min=-0.5, max=0.5)
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([-0.5000, 0.1734, -0.0478, 0.5000])) < 1e-6).all()
t2 = ttorch.clamp(ttorch.tensor({
'a': [-1.7120, 0.1734, -0.0478, 2.0922],
'b': {'x': [[-0.9049, 1.7029, -0.3697], [0.0489, -1.3127, -1.0221]]},
}), min=-0.5, max=0.5)
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [-0.5000, 0.1734, -0.0478, 0.5000],
'b': {'x': [[-0.5000, 0.5000, -0.3697],
[0.0489, -0.5000, -0.5000]]},
})) < 1e-6).all()
@choose_mark()
def test_clamp_(self):
t1 = ttorch.tensor([-1.7120, 0.1734, -0.0478, 2.0922])
t1r = ttorch.clamp_(t1, min=-0.5, max=0.5)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([-0.5000, 0.1734, -0.0478, 0.5000])) < 1e-6).all()
t2 = ttorch.tensor({
'a': [-1.7120, 0.1734, -0.0478, 2.0922],
'b': {'x': [[-0.9049, 1.7029, -0.3697], [0.0489, -1.3127, -1.0221]]},
})
t2r = ttorch.clamp_(t2, min=-0.5, max=0.5)
assert t2r is t2
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [-0.5000, 0.1734, -0.0478, 0.5000],
'b': {'x': [[-0.5000, 0.5000, -0.3697],
[0.0489, -0.5000, -0.5000]]},
})) < 1e-6).all()
@choose_mark()
def test_sign(self):
t1 = ttorch.sign(ttorch.tensor([12, 0, -3]))
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([1, 0, -1])).all()
t2 = ttorch.sign(ttorch.tensor({
'a': [12, 0, -3],
'b': {'x': [[-3, 1], [0, -2]]},
}))
assert (t2 == ttorch.tensor({
'a': [1, 0, -1],
'b': {'x': [[-1, 1],
[0, -1]]},
})).all()
@choose_mark()
def test_round(self):
t1 = ttorch.round(ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]]))
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[1., -2.],
[-2., 3.]])) < 1e-6).all()
t2 = ttorch.round(ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
}))
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[1., -2.],
[-2., 3.]],
'b': {'x': [[1., -4., 1.],
[-5., -2., 3.]]},
})) < 1e-6).all()
@choose_mark()
def test_round_(self):
t1 = ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]])
t1r = ttorch.round_(t1)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[1., -2.],
[-2., 3.]])) < 1e-6).all()
t2 = ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
})
t2r = ttorch.round_(t2)
assert t2r is t2
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[1., -2.],
[-2., 3.]],
'b': {'x': [[1., -4., 1.],
[-5., -2., 3.]]},
})) < 1e-6).all()
@choose_mark()
def test_floor(self):
t1 = ttorch.floor(ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]]))
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[1., -2.],
[-3., 2.]])) < 1e-6).all()
t2 = ttorch.floor(ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
}))
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[1., -2.],
[-3., 2.]],
'b': {'x': [[1., -4., 1.],
[-5., -2., 2.]]},
})) < 1e-6).all()
@choose_mark()
def test_floor_(self):
t1 = ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]])
t1r = ttorch.floor_(t1)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[1., -2.],
[-3., 2.]])) < 1e-6).all()
t2 = ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
})
t2r = ttorch.floor_(t2)
assert t2r is t2
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[1., -2.],
[-3., 2.]],
'b': {'x': [[1., -4., 1.],
[-5., -2., 2.]]},
})) < 1e-6).all()
@choose_mark()
def test_ceil(self):
t1 = ttorch.ceil(ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]]))
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[2., -1.],
[-2., 3.]])) < 1e-6).all()
t2 = ttorch.ceil(ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
}))
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[2., -1.],
[-2., 3.]],
'b': {'x': [[1., -3., 2.],
[-4., -2., 3.]]},
})) < 1e-6).all()
@choose_mark()
def test_ceil_(self):
t1 = ttorch.tensor([[1.2, -1.8], [-2.3, 2.8]])
t1r = ttorch.ceil_(t1)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([[2., -1.],
[-2., 3.]])) < 1e-6).all()
t2 = ttorch.tensor({
'a': [[1.2, -1.8], [-2.3, 2.8]],
'b': {'x': [[1.0, -3.9, 1.3], [-4.8, -2.0, 2.8]]},
})
t2r = ttorch.ceil_(t2)
assert t2r is t2
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [[2., -1.],
[-2., 3.]],
'b': {'x': [[1., -3., 2.],
[-4., -2., 3.]]},
})) < 1e-6).all()
@choose_mark()
def test_sigmoid(self):
t1 = ttorch.sigmoid(ttorch.tensor([1.0, 2.0, -1.5]))
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([0.7311, 0.8808, 0.1824])) < 1e-4).all()
t2 = ttorch.sigmoid(ttorch.tensor({
'a': [1.0, 2.0, -1.5],
'b': {'x': [[0.5, 1.2], [-2.5, 0.25]]},
}))
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [0.7311, 0.8808, 0.1824],
'b': {'x': [[0.6225, 0.7685],
[0.0759, 0.5622]]},
})) < 1e-4).all()
@choose_mark()
def test_sigmoid_(self):
t1 = ttorch.tensor([1.0, 2.0, -1.5])
t1r = ttorch.sigmoid_(t1)
assert t1r is t1
assert isinstance(t1, torch.Tensor)
assert (ttorch.abs(t1 - ttorch.tensor([0.7311, 0.8808, 0.1824])) < 1e-4).all()
t2 = ttorch.tensor({
'a': [1.0, 2.0, -1.5],
'b': {'x': [[0.5, 1.2], [-2.5, 0.25]]},
})
t2r = ttorch.sigmoid_(t2)
assert t2r is t2
assert (ttorch.abs(t2 - ttorch.tensor({
'a': [0.7311, 0.8808, 0.1824],
'b': {'x': [[0.6225, 0.7685],
[0.0759, 0.5622]]},
})) < 1e-4).all()
@choose_mark()
def test_add(self):
t1 = ttorch.add(
ttorch.tensor([1, 2, 3]),
ttorch.tensor([3, 5, 11]),
)
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([4, 7, 14])).all()
t2 = ttorch.add(
ttorch.tensor({
'a': [1, 2, 3],
'b': {'x': [[3, 5], [9, 12]]},
}),
ttorch.tensor({
'a': [3, 5, 11],
'b': {'x': [[31, -15], [13, 23]]},
})
)
assert (t2 == ttorch.tensor({
'a': [4, 7, 14],
'b': {'x': [[34, -10],
[22, 35]]},
})).all()
@choose_mark()
def test_sub(self):
t1 = ttorch.sub(
ttorch.tensor([1, 2, 3]),
ttorch.tensor([3, 5, 11]),
)
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([-2, -3, -8])).all()
t2 = ttorch.sub(
ttorch.tensor({
'a': [1, 2, 3],
'b': {'x': [[3, 5], [9, 12]]},
}),
ttorch.tensor({
'a': [3, 5, 11],
'b': {'x': [[31, -15], [13, 23]]},
})
)
assert (t2 == ttorch.tensor({
'a': [-2, -3, -8],
'b': {'x': [[-28, 20],
[-4, -11]]},
})).all()
@choose_mark()
def test_mul(self):
t1 = ttorch.mul(
ttorch.tensor([1, 2, 3]),
ttorch.tensor([3, 5, 11]),
)
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([3, 10, 33])).all()
t2 = ttorch.mul(
ttorch.tensor({
'a': [1, 2, 3],
'b': {'x': [[3, 5], [9, 12]]},
}),
ttorch.tensor({
'a': [3, 5, 11],
'b': {'x': [[31, -15], [13, 23]]},
})
)
assert (t2 == ttorch.tensor({
'a': [3, 10, 33],
'b': {'x': [[93, -75],
[117, 276]]},
})).all()
@choose_mark()
def test_div(self):
t1 = ttorch.div(ttorch.tensor([0.3810, 1.2774, -0.2972, -0.3719, 0.4637]), 0.5)
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([0.7620, 2.5548, -0.5944, -0.7438, 0.9274])).all()
t2 = ttorch.div(
ttorch.tensor([1.3119, 0.0928, 0.4158, 0.7494, 0.3870]),
ttorch.tensor([-1.7501, -1.4652, 0.1379, -1.1252, 0.0380]),
)
assert isinstance(t2, torch.Tensor)
assert (ttorch.abs(t2 - ttorch.tensor([-0.7496, -0.0633, 3.0152, -0.6660, 10.1842])) < 1e-4).all()
t3 = ttorch.div(
ttorch.tensor({
'a': [0.3810, 1.2774, -0.2972, -0.3719, 0.4637],
'b': {
'x': [1.3119, 0.0928, 0.4158, 0.7494, 0.3870],
'y': [[[1.9579, -0.0335, 0.1178],
[0.8287, 1.4520, -0.4696]],
[[-2.1659, -0.5831, 0.4080],
[0.1400, 0.8122, 0.5380]]],
},
}),
ttorch.tensor({
'a': 0.5,
'b': {
'x': [-1.7501, -1.4652, 0.1379, -1.1252, 0.0380],
'y': [[[-1.3136, 0.7785, -0.7290],
[0.6025, 0.4635, -1.1882]],
[[0.2756, -0.4483, -0.2005],
[0.9587, 1.4623, -2.8323]]],
},
}),
)
assert (ttorch.abs(t3 - ttorch.tensor({
'a': [0.7620, 2.5548, -0.5944, -0.7438, 0.9274],
'b': {
'x': [-0.7496, -0.0633, 3.0152, -0.6660, 10.1842],
'y': [[[-1.4905, -0.0430, -0.1616],
[1.3754, 3.1327, 0.3952]],
[[-7.8589, 1.3007, -2.0349],
[0.1460, 0.5554, -0.1900]]],
}
})) < 1e-4).all()
@choose_mark()
def test_pow(self):
t1 = ttorch.pow(
ttorch.tensor([4, 3, 2, 6, 2]),
ttorch.tensor([4, 2, 6, 4, 3]),
)
assert isinstance(t1, torch.Tensor)
assert (t1 == ttorch.tensor([256, 9, 64, 1296, 8])).all()
t2 = ttorch.pow(
ttorch.tensor({
'a': [4, 3, 2, 6, 2],
'b': {
'x': [[3, 4, 6],
[6, 3, 5]],
'y': [[[3, 5, 5],
[5, 7, 6]],
[[4, 6, 5],
[7, 2, 7]]],
},
}),
ttorch.tensor({
'a': [4, 2, 6, 4, 3],
'b': {
'x': [[7, 4, 6],
[5, 2, 6]],
'y': [[[7, 2, 2],
[2, 3, 2]],
[[5, 2, 6],
[7, 3, 4]]],
},
}),
)
assert (t2 == ttorch.tensor({
'a': [256, 9, 64, 1296, 8],
'b': {
'x': [[2187, 256, 46656],
[7776, 9, 15625]],
'y': [[[2187, 25, 25],
[25, 343, 36]],
[[1024, 36, 15625],
[823543, 8, 2401]]],
}
})).all()
@choose_mark()
def test_neg(self):
t1
self.runRestore(configItem, type, endpoint, app, name, user, restoreAsUser, adminLevel, objExists)
found = True
foundAtAnyScope = True
#We never found the config we wanted to restore
if found == False:
logger.info("i=\"%s\" user=%s, name=%s not found at scope=global in typeFile=%s" % (self.stanzaName, user, name, typeFile))
#This type of configuration does not exist at the global level
else:
logger.info("i=\"%s\" user=%s, name=%s, did not find a typeFile=%s to restore from" % (self.stanzaName, user, name, typeFile))
#The global directory for this app does not exist
else:
logger.debug("i=\"%s\" global directory of dir=%s does not exist" % (self.stanzaName, globalDir))
if foundAtAnyScope == True and res_result != False:
logger.info("i=\"%s\" user=%s restore has run successfully for name=%s, type=%s, restoreAsUser=%s, adminLevel=%s" % (self.stanzaName, user, name, type, restoreAsUser, adminLevel))
return True, message
elif res_result == False and foundAtAnyScope == True:
logger.warn("i=\"%s\" user=%s attempted to restore name=%s, type=%s, restoreAsUser=%s, adminLevel=%s the object was found, but the restore failed" % (self.stanzaName, user, name, type, restoreAsUser, adminLevel))
return False, message
else:
message = "The object was not found, the restore was unsuccessful. Perhaps check the restore date, scope & capitilisation before trying again?"
logger.warn("i=\"%s\" user=%s attempted to restore name=%s, type=%s, restoreAsUser=%s, adminLevel=%s however the object was not found, the restore was unsuccessful. Perhaps check the restore date, scope & capitilisation before trying again?" % (self.stanzaName, user, name, type, restoreAsUser, adminLevel))
return False, message
###########################
#
# runRestore (generic version)
# Once we have received the required configuration (type, app, endpoint, name, et cetera)
# we attempt to run the POST to restore or create the object
#
###########################
def runRestore(self, config, type, endpoint, app, name, user, restoreAsUser, adminLevel, objExists):
result = True
#Only an admin can restore an object owned by someone else
if config['owner'] != user and adminLevel == False:
message = "Owner of the object is listed as owner=%s, however user user=%s requested the restore and is not an admin, rejected" % (config['owner'], user)
logger.error("i=\"" + self.stanzaName + "\"" + message)
return False, message
#Only an admin can use the restoreAsUser option
if restoreAsUser != "" and restoreAsUser != user and adminLevel == False:
message = "restoreAsUser=%s which is not user=%s, this user is not an admin, rejected" % (restoreAsUser, user)
logger.error("i=\"" + self.stanzaName + "\"" + message)
return False, message
#Change the owner to the new owner
if restoreAsUser != "" and adminLevel == True:
config["owner"] = restoreAsUser
logger.info("i=\"%s\" Attempting to run restore for name=%s of type=%s with endpoint=%s user=%s, restoreAsUser=%s, adminLevel=%s, objExists=%s" % (self.stanzaName, name, type, endpoint, user, restoreAsUser, adminLevel, objExists))
sharing = config["sharing"]
owner = config["owner"]
message = ""
createOrUpdate = None
if objExists == True:
createOrUpdate = "update"
else:
createOrUpdate = "create"
headers = {}
auth = None
if not self.destUsername:
headers={'Authorization': 'Splunk %s' % self.session_key}
else:
auth = HTTPBasicAuth(self.destUsername, self.destPassword)
#We cannot post the sharing/owner information to the REST API; we use them later
del config["sharing"]
del config["owner"]
#App / Global scope requires the /nobody/ context to be used for POST requests (GET requests do not care)
url = ""
if sharing == "user":
url = "%s/servicesNS/%s/%s%s" % (self.splunk_rest, owner, app, endpoint)
else:
url = "%s/servicesNS/nobody/%s%s" % (self.splunk_rest, app, endpoint)
payload = config
#The config has an origName in it, therefore the object-exists lookup may not have worked as expected;
#repeat it here for the edge cases (field extractions, field transforms and automatic lookups)
origName = None
if 'origName' in config:
origName = config['origName']
del config['origName']
objExistsURL = "%s/%s?output_mode=json" % (url, origName)
logger.debug("i=\"%s\" URL=%s re-checking object exists URL due to name override from %s to original name of %s proxies_length=%s" % (self.stanzaName, objExistsURL, name, origName, len(self.proxies)))
#Verify=false is hardcoded to work around local SSL issues
res = requests.get(objExistsURL, auth=auth, headers=headers, verify=self.sslVerify, proxies=self.proxies)
#If we get 404 it definitely does not exist or it has a name override
if (res.status_code == 404):
logger.debug("i=\"%s\" URL=%s is throwing a 404, assuming new object creation" % (self.stanzaName, objExistsURL))
objExists = False
elif (res.status_code != requests.codes.ok):
logger.error("i=\"%s\" URL=%s in app=%s statuscode=%s reason=%s response=\"%s\"" % (self.stanzaName, objExistsURL, app, res.status_code, res.reason, res.text))
else:
#However the fact that we did not get a 404 does not mean it exists in the context we expect it to, perhaps it's global and from another app context?
#or perhaps it's app level but we're restoring a private object...
logger.debug("i=\"%s\" Attempting to JSON loads on %s" % (self.stanzaName, res.text))
resDict = json.loads(res.text)
for entry in resDict['entry']:
sharingLevel = entry['acl']['sharing']
appContext = entry['acl']['app']
appScope = False
userScope = False
if sharing == "global" or sharing == "app":
appScope = True
else:
userScope = True
if appContext == app and appScope == True and (sharingLevel == 'app' or sharingLevel == 'global'):
objExists = True
elif appContext == app and userScope == True and sharingLevel == "user":
objExists = True
logger.debug("i=\"%s\" app=%s objExists=%s after re-checking on %s" % (self.stanzaName, app, objExists, objExistsURL))
#This is an existing object we are modifying
if objExists == True:
createOrUpdate = "update"
if origName:
url = url + "/" + origName
else:
url = url + "/" + name
del config["name"]
#Cannot post type/stanza when updating field extractions or a few other object types, but they are required for creation
if 'type' in config:
del config['type']
if 'stanza' in config:
del config['stanza']
#Hack to handle the times (conf-times) not including required attributes for creation in existing entries
#not sure how this happens but it fails to create in 7.0.5 but works fine in 7.2.x, fixing for the older versions
if type=="times_conf-times" and "is_sub_menu" not in payload:
payload["is_sub_menu"] = "0"
elif type=="collections_kvstore" and 'disabled' in payload:
del payload['disabled']
logger.debug("i=\"%s\" Attempting to %s type=%s with name=%s on URL=%s with payload=\"%s\" in app=%s proxies_length=%s" % (self.stanzaName, createOrUpdate, type, name, url, payload, app, len(self.proxies)))
res = requests.post(url, auth=auth, headers=headers, verify=self.sslVerify, data=payload, proxies=self.proxies)
if (res.status_code != requests.codes.ok and res.status_code != 201):
logger.error("i=\"%s\" user=%s, name=%s of type=%s with URL=%s statuscode=%s reason=%s, response=\"%s\", in app=%s, owner=%s" % (self.stanzaName, user, name, type, url, res.status_code, res.reason, res.text, app, owner))
#Saved Searches sometimes fail due to the VSID field, auto-retry in case that solves the problem...
if type=="savedsearches":
if 'vsid' in payload:
del payload['vsid']
res = requests.post(url, auth=auth, headers=headers, verify=self.sslVerify, data=payload, proxies=self.proxies)
if (res.status_code != requests.codes.ok and res.status_code != 201):
logger.error("i=\"%s\" user=%s, re-attempted without vsid but result for name=%s of type=%s with URL=%s statuscode=%s reason=%s, response=\"%s\", in app=%s, owner=%s" % (self.stanzaName, user, name, type, url, res.status_code, res.reason, res.text, app, owner))
result = False
else:
logger.info("i=\"%s\" user=%s, name=%s of type=%s with URL=%s successfully %s with the vsid field removed, feel free to ignore the previous error" % (self.stanzaName, user, name, type, url, createOrUpdate))
else:
logger.debug("i=\"%s\" %s name=%s of type=%s in app=%s with URL=%s result=\"%s\" owner=%s" % (self.stanzaName, createOrUpdate, name, type, app, url, res.text, owner))
#Parse the result to re-confirm the URL and check for messages from Splunk (and log warnings about them)
root = ET.fromstring(res.text)
objURL = None
for child in root:
#Working per entry in the results
if child.tag.endswith("entry"):
#Down to each entry level
for innerChild in child:
#print innerChild.tag
if innerChild.tag.endswith("link") and innerChild.attrib["rel"]=="list":
objURL = "%s/%s" % (self.splunk_rest, innerChild.attrib["href"])
logger.debug("i=\"%s\" name=%s of type=%s in app=%s URL=%s" % (self.stanzaName, name, type, app, objURL))
elif child.tag.endswith("messages"):
for innerChild in child:
if innerChild.tag.endswith("msg") and innerChild.attrib["type"]=="ERROR" or "WARN" in innerChild.attrib:
logger.warn("i=\"%s\" name=%s of type=%s in app=%s had a warn/error message of '%s' owner=%s" % (self.stanzaName, name, type, app, innerChild.text, owner))
#Sometimes the object appears to be created but is unusable, which is annoying; at least provide a warning in the logs
if not objURL:
message = "never found objURL so cannot complete ACL change with url=%s, response text=\"%s\" when looking for name=%s, type=%s app=%s, owner=%s" % (url, res.text, name, type, app, owner)
logger.warn("i=\"" + self.stanzaName + "\"" + message)
return False, message
#Re-owning it to the previous owner and sharing level
url = "%s/acl" % (objURL)
payload = { "owner": owner, "sharing" | |
assoc.is_established
msg = (
r"No exact matching context found for 'Unified Procedure Step "
r"- Push SOP Class', checking accepted contexts for other UPS "
r"SOP classes"
)
ds = Dataset()
ds.TransactionUID = '1.2.3.4'
with caplog.at_level(logging.DEBUG, logger='pynetdicom'):
responses = assoc.send_c_find(ds, UnifiedProcedureStepPushSOPClass)
assert msg in caplog.text
assoc.release()
scp.shutdown()
def test_allow_conversion(self):
"""Test allow_conversion=False."""
self.ae = ae = AE()
ae.acse_timeout = 5
ae.dimse_timeout = 5
ae.network_timeout = 5
ae.add_supported_context(CTImageStorage, ImplicitVRLittleEndian)
ae.add_supported_context(CTImageStorage, ExplicitVRLittleEndian)
scp = ae.start_server(('', 11112), block=False)
ae.add_requested_context(CTImageStorage, ImplicitVRLittleEndian)
#ae.add_requested_context(CTImageStorage, ExplicitVRLittleEndian)
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
msg = (
r"No presentation context for 'CT Image Storage' has been "
r"accepted by the peer with 'Explicit VR"
)
with pytest.raises(ValueError, match=msg):
assoc._get_valid_context(
CTImageStorage,
ExplicitVRLittleEndian,
'scu',
allow_conversion=False
)
assoc.release()
scp.shutdown()
class TestEventHandlingAcceptor(object):
"""Test the transport events and handling as acceptor."""
def setup(self):
self.ae = None
_config.LOG_HANDLER_LEVEL = 'none'
def teardown(self):
if self.ae:
self.ae.shutdown()
_config.LOG_HANDLER_LEVEL = 'standard'
def test_no_handlers(self):
"""Test with no association event handlers bound."""
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
scp = ae.start_server(('', 11112), block=False)
assert scp.get_handlers(evt.EVT_ABORTED) == []
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
assert scp.get_handlers(evt.EVT_ABORTED) == []
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == []
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
assoc.release()
scp.shutdown()
def test_no_handlers_unbind(self):
"""Test unbinding a handler that's not bound."""
_config.LOG_HANDLER_LEVEL = 'standard'
def dummy(event):
pass
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
scp = ae.start_server(('', 11112), block=False)
assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT]
scp.unbind(evt.EVT_DIMSE_SENT, dummy)
assert dummy not in scp._handlers[evt.EVT_DIMSE_SENT]
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT]
assoc.unbind(evt.EVT_DIMSE_SENT, dummy)
assert dummy not in assoc._handlers[evt.EVT_DIMSE_SENT]
child = scp.active_associations[0]
assert dummy not in child._handlers[evt.EVT_DIMSE_SENT]
child.unbind(evt.EVT_DIMSE_SENT, dummy)
assert dummy not in child._handlers[evt.EVT_DIMSE_SENT]
assoc.release()
scp.shutdown()
def test_unbind_intervention(self):
"""Test unbinding a user intervention handler."""
def dummy(event):
pass
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
scp = ae.start_server(('', 11112), block=False)
scp.bind(evt.EVT_C_ECHO, dummy)
assert scp.get_handlers(evt.EVT_C_ECHO) == (dummy, None)
scp.unbind(evt.EVT_C_ECHO, dummy)
assert scp.get_handlers(evt.EVT_C_ECHO) != (dummy, None)
assert scp.get_handlers(evt.EVT_C_ECHO) == (evt._c_echo_handler, None)
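# unbinding a user intervention handler restores the library default
# (evt._c_echo_handler) rather than leaving EVT_C_ECHO unhandled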
scp.shutdown()
def test_unbind_intervention_assoc(self):
"""Test unbinding a user intervention handler."""
def dummy(event):
pass
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
scp = ae.start_server(('', 11112), block=False)
scp.bind(evt.EVT_C_ECHO, dummy)
assert scp.get_handlers(evt.EVT_C_ECHO) == (dummy, None)
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_C_ECHO) == (dummy, None)
scp.unbind(evt.EVT_C_ECHO, dummy)
assert scp.get_handlers(evt.EVT_C_ECHO) != (dummy, None)
assert scp.get_handlers(evt.EVT_C_ECHO) == (evt._c_echo_handler, None)
assert child.get_handlers(evt.EVT_C_ECHO) != (dummy, None)
assert child.get_handlers(evt.EVT_C_ECHO) == (
evt._c_echo_handler, None
)
assoc.release()
scp.shutdown()
def test_abort(self):
"""Test starting with handler bound to EVT_ABORTED."""
triggered = []
def handle(event):
triggered.append(event)
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ABORTED, handle)]
scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
assoc.abort()
while scp.active_associations:
time.sleep(0.05)
assert len(triggered) == 1
event = triggered[0]
assert isinstance(event, Event)
assert isinstance(event.assoc, Association)
assert isinstance(event.timestamp, datetime)
assert event.event.name == 'EVT_ABORTED'
scp.shutdown()
def test_abort_bind(self):
"""Test binding a handler to EVT_ABORTED."""
triggered = []
def handle(event):
triggered.append(event)
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ABORTED, handle)]
scp = ae.start_server(('', 11112), block=False)
assert scp.get_handlers(evt.EVT_ABORTED) == []
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
assert scp.get_handlers(evt.EVT_ABORTED) == []
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == []
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
scp.bind(evt.EVT_ABORTED, handle)
assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
assoc.abort()
while scp.active_associations:
time.sleep(0.05)
assert len(triggered) == 1
event = triggered[0]
assert isinstance(event, Event)
assert isinstance(event.assoc, Association)
assert isinstance(event.timestamp, datetime)
assert event.event.name == 'EVT_ABORTED'
scp.shutdown()
def test_abort_unbind(self):
"""Test starting with handler bound to EVT_ABORTED."""
triggered = []
def handle(event):
triggered.append(event)
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ABORTED, handle)]
scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
assert scp.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == [(handle, None)]
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
scp.unbind(evt.EVT_ABORTED, handle)
assert scp.get_handlers(evt.EVT_ABORTED) == []
assert scp.get_handlers(evt.EVT_ACCEPTED) == []
assert scp.get_handlers(evt.EVT_ESTABLISHED) == []
assert scp.get_handlers(evt.EVT_REJECTED) == []
assert scp.get_handlers(evt.EVT_RELEASED) == []
assert scp.get_handlers(evt.EVT_REQUESTED) == []
assert assoc.get_handlers(evt.EVT_ABORTED) == []
assert assoc.get_handlers(evt.EVT_ACCEPTED) == []
assert assoc.get_handlers(evt.EVT_ESTABLISHED) == []
assert assoc.get_handlers(evt.EVT_REJECTED) == []
assert assoc.get_handlers(evt.EVT_RELEASED) == []
assert assoc.get_handlers(evt.EVT_REQUESTED) == []
child = scp.active_associations[0]
assert child.get_handlers(evt.EVT_ABORTED) == []
assert child.get_handlers(evt.EVT_ACCEPTED) == []
assert child.get_handlers(evt.EVT_ESTABLISHED) == []
assert child.get_handlers(evt.EVT_REJECTED) == []
assert child.get_handlers(evt.EVT_RELEASED) == []
assert child.get_handlers(evt.EVT_REQUESTED) == []
assoc.abort()
while scp.active_associations:
time.sleep(0.05)
assert len(triggered) == 0
scp.shutdown()
def test_abort_local(self):
"""Test the handler bound to EVT_ABORTED with local requested abort."""
triggered = []
def handle(event):
triggered.append(event)
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ABORTED, handle)]
scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assert len(scp.active_associations) == 1
scp.active_associations[0].abort()
while scp.active_associations:
time.sleep(0.05)
assert len(triggered) == 1
event = triggered[0]
assert isinstance(event, Event)
assert isinstance(event.assoc, Association)
assert isinstance(event.timestamp, datetime)
assert event.event.name == 'EVT_ABORTED'
scp.shutdown()
def test_abort_raises(self, caplog):
"""Test the handler for EVT_ACCEPTED raising exception."""
def handle(event):
raise NotImplementedError("Exception description")
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ABORTED, handle)]
scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
with caplog.at_level(logging.ERROR, logger='pynetdicom'):
assoc = ae.associate('localhost', 11112)
assert assoc.is_established
assoc.abort()
while scp.active_associations:
time.sleep(0.05)
scp.shutdown()
msg = (
"Exception raised in user's 'evt.EVT_ABORTED' event handler"
" 'handle'"
)
assert msg in caplog.text
assert "Exception description" in caplog.text
def test_accept(self):
"""Test starting with handler bound to EVT_ACCEPTED."""
triggered = []
def handle(event):
triggered.append(event)
self.ae = ae = AE()
ae.add_supported_context(VerificationSOPClass)
ae.add_requested_context(VerificationSOPClass)
handlers = [(evt.EVT_ACCEPTED, handle)]
scp = ae.start_server(('', 11112), block=False, evt_handlers=handlers)
| |
import gym
from enum import Enum, IntEnum
import numpy as np
import os
from datetime import datetime
import time
import copy
from typing import List
from utilities import tensor_from
import torch
import random
import sys
import math
if sys.platform == 'win32':
import imageio
import pyglet
from pyglet.gl import *
import arcade
class AcademicPapers(Enum):
LearningModelBasedPlanningFromScratch = 1
MetacontrolForAdaptiveImaginationBasedOptimization = 2
SETTINGS_FROM_PAPERS = {
AcademicPapers.MetacontrolForAdaptiveImaginationBasedOptimization: {
"n_actions_per_episode": 1,
"euler_method_step_size": 0.05,
"gravitational_constant": 1000000,
"damping_constant": 0.1,
"fuel_price": None,
"fuel_cost_threshold": None,
"agent_ship_random_mass_interval": (1, 9),
"agent_ship_random_radial_distance_interval": (150, 250),
"planets_random_mass_interval": (20, 50),
"planets_random_radial_distance_interval": (100, 250),
"sun_mass": 100,
"sun_random_radial_distance_interval": (100, 200)
},
AcademicPapers.LearningModelBasedPlanningFromScratch: {
"n_actions_per_episode": 3,
"euler_method_step_size": 0.05,
"gravitational_constant": 10, # I've guessed this value as the paper doesn't mention it
"damping_constant": 0.1, # I've guessed this value as the paper doesn't mention it
"fuel_price": 0.0002,
"fuel_cost_threshold": 8,
"agent_ship_random_mass_interval": (0.004, 0.36),
"agent_ship_random_radial_distance_interval": (0.6, 1.0),
"planets_random_mass_interval": (0.08, 0.4),
"planets_random_radial_distance_interval": (0.4, 1.0),
"sun_mass": None,
"sun_random_radial_distance_interval": None
}
}
class GravityCap(IntEnum):
No = 1
Low = 2
High = 3
Realistic = 4
DEFAULT = object() # Set init argument to DEFAULT to get setting from the chosen paper's experimental setup
class SpaceshipEnvironment(gym.Env):
"""An OpenAI Gym environment of the Spaceship task.
In this continuous control task, a spaceship floats amongst a handful of stationary planets. The agent's goal is to get the spaceship as close as possible to the goal position (the center at x=0, y=0) by the end of the episode. It can fire its thrusters every few timesteps with a certain force and direction. Apart from that, the ship is subject to the gravitational pull of the planets (which themselves don't move).
Optionally, a fuel cost can be set, such that the agent needs to reach the goal using a minimum amount of thruster force.
Observation:
Dict(3) {
'action_required' (boolean): Whether this is a step in which the agent can execute an action, i.e. fire the ship's thrusters. When False, actions passed to step() will be ignored.
'agent_ship' (`Ship` object): The current state of the agent ship.
'planets' (list of `Planet` objects): The current state of the planets.
}
You can get Numpy vector representations with `Ship.encode_as_vector()` and `Planet.encode_as_vector()`.
Actions:
Continuous(2) [
0 Thruster force in the x direction
1 Thruster force in the y direction
]
Example usage:
game = SpaceshipEnvironment(n_planets = 3)
observation = game.reset()
game.render()
agent = YourAlgorithm()
while True:
if observation["action_required"]: # Indicates whether this is a timestep at which the thrusters can be fired
action = agent.forward(observation)
else:
action = None
observation, reward, done, _ = game.step(action)
game.render()
if done:
observation = game.reset()
game.render()
"""
metadata = {'render.modes': ['human']}
def __init__(self,
n_planets,
default_settings_from=AcademicPapers.LearningModelBasedPlanningFromScratch,
n_actions_per_episode=DEFAULT,
n_steps_per_action=12,
euler_method_step_size=DEFAULT,
gravitational_constant=DEFAULT,
damping_constant=DEFAULT,
fuel_price=DEFAULT,
fuel_cost_threshold=DEFAULT,
agent_ship_random_mass_interval=DEFAULT,
agent_ship_random_radial_distance_interval=DEFAULT,
planets_random_mass_interval=DEFAULT,
planets_random_radial_distance_interval=DEFAULT,
sun_mass=DEFAULT,
sun_random_radial_distance_interval=DEFAULT,
n_secondary_planets=0,
secondary_planets_random_mass_interval=(0.0, 0.0),
secondary_planets_random_radial_distance_interval=(1.8, 2.0),
cap_gravity=GravityCap.Low,
render_window_size=900,
store_episode_as_gif=False,
gif_file_name="",
render_after_each_step=False,
n_seconds_sleep_per_render=0.05,
euler_scale=1,
implicit_euler=False,
with_beacons=False,
beacon_probability=1,
beacon_radial_distance_interval=(0.0, 1.5),
n_ice_rocks=0
):
"""
Args:
n_planets (int): The number of planets randomly positioned in each episode.
default_settings_from (enum): The academic paper whose settings are used for arguments set to `DEFAULT`. You can override any setting you want; anything you don't set will match this paper's experiments.
n_actions_per_episode (int): The number of (real, non-imagined) actions the agent executes per episode.
n_steps_per_action (int): If e.g. 12, the agent executes an action at one timestep and then waits for 11 steps of physics simulations before executing the next.
euler_method_step_size (float): The time resolution (smaller is more fine-grained / less movement per step) at which the physics are simulated.
gravitational_constant (float): The magnitude of gravity. See C.2 in "Metacontrol for adaptive imagination-based optimization".
damping_constant (float): Controls inertia; dampens the effect of gravity on the spaceship. See C.2 in "Metacontrol for adaptive imagination-based optimization".
fuel_price (float): Adds linearly to the agent's loss when it uses a force larger than `fuel_cost_threshold`: fuel cost = max(0, `fuel_price` * (force - `fuel_cost_threshold`)).
fuel_cost_threshold (float): See above.
agent_ship_random_mass_interval (2-tuple of floats): The interval from which the agent ship's mass will be uniformly sampled for each episode.
agent_ship_random_radial_distance_interval (2-tuple of floats): The interval from which the agent's initial position will be sampled for each episode. First, the radial distance from the goal position is uniformly sampled from `agent_ship_random_radial_distance_interval`. Next, the angle is uniformly sampled from [0, 360].
planets_random_mass_interval (2-tuple of floats): See above.
planets_random_radial_distance_interval (2-tuple of floats): See above.
sun_mass (float): When not None, the first planet's mass will not be randomly sampled, but set to `sun_mass`.
sun_random_radial_distance_interval (2-tuple of floats): When not None, the first planet's distance will be sampled from `sun_random_radial_distance_interval` instead of `planets_random_radial_distance_interval`.
render_window_size (int): The height and width of the window when rendering (window is always square).
store_episode_as_gif (boolean): When True, all finished episodes are stored as animated GIFs.
gif_file_name (string): When non-empty, all animated GIF file names will be prefixed with `gif_file_name`. Otherwise, a time stamp will be used.
"""
parameters = locals().copy()
parameters.pop('self')  # avoid setting a spurious `self.self` attribute below
# Bit of an unconventional init method, but it enables two sets of default argument values depending on `default_settings_from`. Any argument set to `DEFAULT` will get its value from the dict above.
for parameter, value in parameters.items():
if value is DEFAULT:
self.__setattr__(parameter, SETTINGS_FROM_PAPERS[default_settings_from][parameter])
else:
self.__setattr__(parameter, value)
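# Illustrative example (assumed values): SpaceshipEnvironment(n_planets=3,
# fuel_price=0.01) keeps the explicit fuel_price but takes
# gravitational_constant, damping_constant, etc. from
# SETTINGS_FROM_PAPERS[default_settings_from].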
self.agent_ship = None
self.planets = None
self.beacons = None
self.i_step = None
self.episode_cumulative_loss = None
self.render_window = None
self.lowest_zoom_factor_this_episode = None
# A list of lists of xy-positions: one list for every `n_steps_per_action` steps. For rendering the ship's trajectory through space.
self.past_ship_trajectories = None
self.imagined_ship_trajectories = None
self.estimated_ship_trajectories = None
# Determines how zoomed out the rendering initially is.
self.minimally_visible_world_size = 2.5 * max([
radius_interval[1] for radius_interval
in [self.agent_ship_random_radial_distance_interval, self.planets_random_radial_distance_interval, self.sun_random_radial_distance_interval]
if radius_interval is not None
])
self.mass_to_pixel_ratio = 50 / max([
mass for mass
in [self.agent_ship_random_mass_interval[1], self.planets_random_mass_interval[1], self.sun_mass]
if mass is not None
])
self.minimally_visible_world_radius = self.minimally_visible_world_size / 2
self.render_window_radius = render_window_size / 2
if not self.gif_file_name:
self.gif_file_name = 'experiment_{date:%Y%m%d_%H%M%S}'.format(date=datetime.now())
self.gif_file_name += '-'
self.i_episode = -1
def reset(self):
self.agent_ship = Ship(
random_mass_interval=self.agent_ship_random_mass_interval,
random_radial_distance_interval=self.agent_ship_random_radial_distance_interval,
ide=0
)
self.planets = [
Planet(
random_mass_interval=self.planets_random_mass_interval,
random_radial_distance_interval=self.planets_random_radial_distance_interval,
ide=i + 1
)
for i in range(self.n_planets)
]
if self.n_planets > 0 and self.sun_random_radial_distance_interval:
self.planets[0] = Planet(
random_mass_interval=self.planets_random_mass_interval,
random_radial_distance_interval=self.sun_random_radial_distance_interval,
ide=1
)
if self.n_planets > 0 and self.sun_mass:
self.planets[0].mass = self.sun_mass
for i in range(self.n_secondary_planets):
self.planets.append(Planet(
random_mass_interval=self.secondary_planets_random_mass_interval,
random_radial_distance_interval=self.secondary_planets_random_radial_distance_interval,
is_secondary=True,
ide=self.n_planets + 1 + i
))
for i in range(self.n_ice_rocks):
self.planets.append(IceRock(
random_mass_interval=self.planets_random_mass_interval,
random_radial_distance_interval=self.planets_random_radial_distance_interval,
ide=self.n_planets + self.n_secondary_planets + 1 + i
))
if self.with_beacons and np.random.rand() <= self.beacon_probability:
self.beacons = [Beacon(
mass=0,
random_radial_distance_interval=self.beacon_radial_distance_interval,
ide=self.n_planets + self.n_ice_rocks + self.n_secondary_planets + 1
)]
else:
self.beacons = []
self.i_step = 0
self.lowest_zoom_factor_this_episode = 1
for planet in self.planets + self.beacons:
self.update_zoom_factor(planet.x, planet.y)
self.past_ship_trajectories = []
self.imagined_ship_trajectories = []
self.estimated_ship_trajectories = []
self.episode_cumulative_loss = 0
return self.observation(True)
def obj(self, ide):
return self.objs()[ide]
def n_obj(self):
return len(self.objs())
def objs(self):
return [self.agent_ship] + self.planets + self.beacons
def observation(self, increase_episode=False):
if self.render_after_each_step:
self.render()
if increase_episode:
self.i_episode += 1
return {
'action_required': self.first_step_of_action(),
'agent_ship': self.agent_ship,
'planets': self.planets
}
def step(self, xy_thrust_force=None):
if not self.first_step_of_action() or xy_thrust_force is None:
# Ignore the action when this is a timestep at which the thrusters may not be fired.
xy_thrust_force = np.zeros(2)
else:
# This is the first timestep after a new action; initialize the list of past positions for trajectory visualization.
self.past_ship_trajectories.append([(self.agent_ship.x, self.agent_ship.y)])
for _ in range(self.euler_scale):
xy_gravitational_forces = []
for planet in self.planets:
if planet.type is SpaceObject.Types.ICE_ROCK:
continue
pretended_radius = np.linalg.norm(self.agent_ship.xy_position - planet.xy_position)
pretended_xy_distance = planet.xy_position - self.agent_ship.xy_position
if self.cap_gravity is not GravityCap.No:
minimal_radius = planet.mass
if self.cap_gravity is GravityCap.Realistic:
minimal_radius = np.cbrt(planet.mass / (2 * math.pi))
if self.cap_gravity is GravityCap.High:
minimal_radius += self.agent_ship.mass * 2.8
if pretended_radius < minimal_radius:
pretended_radius = minimal_radius
actual_angle, actual_radius = cartesian2polar(pretended_xy_distance[0], pretended_xy_distance[1])
pretended_xy_distance = np.array(polar2cartesian(actual_angle, pretended_radius))
xy_gravitational_forces.append(self.gravitational_constant * planet.mass * self.agent_ship.mass * pretended_xy_distance / pretended_radius ** 3)
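# The line above is Newton's law of gravitation in vector form:
# F = G * m_planet * m_ship * delta_xy / r**3, where dividing by r**3
# (rather than r**2) normalizes delta_xy into a unit direction vector;
# r is clamped from below (per GravityCap) to keep forces finite.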
xy_acceleration = (sum(xy_gravitational_forces) - self.damping_constant * self.agent_ship.xy_velocity + xy_thrust_force) / self.agent_ship.mass
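# The two integrator branches below differ only in update order: updating
# velocity first and then position (with the new velocity) is the
# semi-implicit / symplectic Euler method; the reverse order is the
# standard explicit Euler step.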
if self.implicit_euler:
self.agent_ship.xy_velocity += self.euler_method_step_size / self.euler_scale * xy_acceleration
self.agent_ship.xy_position += self.euler_method_step_size / self.euler_scale * self.agent_ship.xy_velocity
else:
self.agent_ship.xy_position += self.euler_method_step_size / self.euler_scale * self.agent_ship.xy_velocity
self.agent_ship.xy_velocity += self.euler_method_step_size / self.euler_scale * xy_acceleration
self.past_ship_trajectories[-1].append((self.agent_ship.x, self.agent_ship.y))
self.i_step += 1
# (completion assumed; the source is cut off here, so the expression is
# finished per the formula documented in __init__:
# fuel cost = max(0, fuel_price * (force - fuel_cost_threshold)))
fuel_cost = max(
    0,
    (np.absolute(xy_thrust_force).sum() - self.fuel_cost_threshold) * self.fuel_price
) if self.fuel_price is not None else 0
"""Client to access DICOM Part10 files through a layer of abstraction."""
import collections
import io
import logging
import math
import os
import re
import sqlite3
import sys
import time
import traceback
from collections import OrderedDict
from enum import Enum
from pathlib import Path
from typing import (
Any,
Dict,
Iterator,
Iterable,
List,
Mapping,
Optional,
Sequence,
Tuple,
Union,
)
import numpy as np
from PIL import Image
from PIL.ImageCms import ImageCmsProfile, createProfile
from pydicom.dataset import Dataset, FileMetaDataset
from pydicom.encaps import encapsulate, get_frame_offsets
from pydicom.errors import InvalidDicomError
from pydicom.filebase import DicomFileLike
from pydicom.datadict import dictionary_VR, keyword_for_tag, tag_for_keyword
from pydicom.filereader import (
data_element_offset_to_value,
dcmread,
read_file_meta_info,
read_partial,
)
from pydicom.filewriter import dcmwrite
from pydicom.pixel_data_handlers.numpy_handler import unpack_bits
from pydicom.tag import (
BaseTag,
ItemTag,
SequenceDelimiterTag,
Tag,
TupleTag,
)
from pydicom.uid import UID
from pydicom.valuerep import DA, DT, TM
logger = logging.getLogger(__name__)
_FLOAT_PIXEL_DATA_TAGS = {0x7FE00008, 0x7FE00009, }
_UINT_PIXEL_DATA_TAGS = {0x7FE00010, }
_PIXEL_DATA_TAGS = _FLOAT_PIXEL_DATA_TAGS.union(_UINT_PIXEL_DATA_TAGS)
_JPEG_SOI_MARKER = b'\xFF\xD8' # also JPEG-LS
_JPEG_EOI_MARKER = b'\xFF\xD9' # also JPEG-LS
_JPEG2000_SOC_MARKER = b'\xFF\x4F'
_JPEG2000_EOC_MARKER = b'\xFF\xD9'
_START_MARKERS = {_JPEG_SOI_MARKER, _JPEG2000_SOC_MARKER}
_END_MARKERS = {_JPEG_EOI_MARKER, _JPEG2000_EOC_MARKER}
def _get_bot(fp: DicomFileLike, number_of_frames: int) -> List[int]:
"""Read or build the Basic Offset Table (BOT).
Parameters
----------
fp: pydicom.filebase.DicomFileLike
Pointer for DICOM PS3.10 file stream positioned at the first byte of
the Pixel Data element
number_of_frames: int
Number of frames contained in the Pixel Data element
Returns
-------
List[int]
Offset of each Frame item in bytes from the first byte of the Pixel
Data element following the BOT item
Note
----
Moves the pointer to the first byte of the open file following the BOT item
(the first byte of the first Frame item).
"""
logger.debug('read Basic Offset Table')
basic_offset_table = _read_bot(fp)
first_frame_offset = fp.tell()
tag = TupleTag(fp.read_tag())
if int(tag) != ItemTag:
raise ValueError('Reading of Basic Offset Table failed')
fp.seek(first_frame_offset, 0)
# Basic Offset Table item must be present, but it may be empty
if len(basic_offset_table) == 0:
logger.debug('Basic Offset Table item is empty')
if len(basic_offset_table) != number_of_frames:
logger.debug('build Basic Offset Table item')
basic_offset_table = _build_bot(
fp,
number_of_frames=number_of_frames
)
return basic_offset_table
def _read_bot(fp: DicomFileLike) -> List[int]:
"""Read the Basic Offset Table (BOT) of an encapsulated Pixel Data element.
Parameters
----------
fp: pydicom.filebase.DicomFileLike
Pointer for DICOM PS3.10 file stream positioned at the first byte of
the Pixel Data element
Returns
-------
List[int]
Offset of each Frame item in bytes from the first byte of the Pixel
Data element following the BOT item
Note
----
Moves the pointer to the first byte of the open file following the BOT item
(the first byte of the first Frame item).
Raises
------
IOError
When file pointer is not positioned at first byte of Pixel Data element
"""
tag = TupleTag(fp.read_tag())
if int(tag) not in _PIXEL_DATA_TAGS:
raise IOError(
'Expected file pointer at first byte of Pixel Data element.'
)
# Skip Pixel Data element header (tag, VR, length)
pixel_data_element_value_offset = data_element_offset_to_value(
fp.is_implicit_VR, 'OB'
)
fp.seek(pixel_data_element_value_offset - 4, 1)
is_empty, offsets = get_frame_offsets(fp)
return offsets
def _build_bot(fp: DicomFileLike, number_of_frames: int) -> List[int]:
"""Build a Basic Offset Table (BOT) for an encapsulated Pixel Data element.
Parameters
----------
fp: pydicom.filebase.DicomFileLike
Pointer for DICOM PS3.10 file stream positioned at the first byte of
the Pixel Data element following the empty Basic Offset Table (BOT)
number_of_frames: int
Total number of frames in the dataset
Returns
-------
List[int]
Offset of each Frame item in bytes from the first byte of the Pixel
Data element following the BOT item
Note
----
Moves the pointer back to the first byte of the Pixel Data element
following the BOT item (the first byte of the first Frame item).
Raises
------
IOError
When file pointer is not positioned at first byte of first Frame item
after Basic Offset Table item or when parsing of Frame item headers
fails
ValueError
When the number of offsets doesn't match the specified number of frames
"""
initial_position = fp.tell()
offset_values = []
current_offset = 0
i = 0
while True:
frame_position = fp.tell()
tag = TupleTag(fp.read_tag())
if int(tag) == SequenceDelimiterTag:
break
if int(tag) != ItemTag:
fp.seek(initial_position, 0)
raise IOError(
'Building Basic Offset Table (BOT) failed. Expected tag of '
f'Frame item #{i} at position {frame_position}.'
)
length = fp.read_UL()
if length % 2:
fp.seek(initial_position, 0)
raise IOError(
'Building Basic Offset Table (BOT) failed. '
f'Length of Frame item #{i} is not a multiple of 2.'
)
elif length == 0:
fp.seek(initial_position, 0)
raise IOError(
'Building Basic Offset Table (BOT) failed. '
f'Length of Frame item #{i} is zero.'
)
first_two_bytes = fp.read(2)
if not fp.is_little_endian:
first_two_bytes = first_two_bytes[::-1]
# In case of fragmentation, we only want to get the offsets to the
# first fragment of a given frame. We can identify those based on the
# JPEG and JPEG 2000 markers that should be found at the beginning and
# end of the compressed byte stream.
if first_two_bytes in _START_MARKERS:
current_offset = frame_position - initial_position
offset_values.append(current_offset)
i += 1
fp.seek(length - 2, 1) # minus the first two bytes
if len(offset_values) != number_of_frames:
raise ValueError(
'Number of frame items does not match specified Number of Frames.'
)
else:
basic_offset_table = offset_values
fp.seek(initial_position, 0)
return basic_offset_table
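# A minimal usage sketch (an assumption, not part of the original module):
# combining the offsets returned by _get_bot() with the stream position of
# the first Frame item to read the raw bytes of a single frame. For
# fragmented frames this returns only the first fragment.
def _read_frame_item(
    fp: DicomFileLike,
    first_frame_position: int,
    frame_offset: int
) -> bytes:
    """Read the (possibly compressed) bytes of one Frame item."""
    fp.seek(first_frame_position + frame_offset, 0)
    tag = TupleTag(fp.read_tag())
    if int(tag) != ItemTag:
        raise IOError('Expected tag of a Frame item.')
    length = fp.read_UL()
    return fp.read(length)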
class _ImageFileReader:
"""Class for reading DICOM files that represent Image Information Entities.
The class provides methods for efficient access to individual Frame items
contained in the Pixel Data element of a Data Set stored in a Part10 file
on disk without loading the entire element into memory.
"""
def __init__(self, fp: Union[str, Path, DicomFileLike]):
"""
Parameters
----------
fp: Union[str, pathlib.Path, pydicom.filebase.DicomfileLike]
DICOM Part10 file containing a dataset of an image SOP Instance
"""
self._filepointer: Union[DicomFileLike, None]
self._filepath: Union[Path, None]
if isinstance(fp, DicomFileLike):
is_little_endian, is_implicit_VR = self._check_file_format(fp)
try:
if fp.is_little_endian != is_little_endian:
raise ValueError(
'Transfer syntax of file object has incorrect value '
'for attribute "is_little_endian".'
)
except AttributeError:
raise AttributeError(
'Transfer syntax of file object does not have '
'attribute "is_little_endian".'
)
try:
if fp.is_implicit_VR != is_implicit_VR:
raise ValueError(
'Transfer syntax of file object has incorrect value '
'for attribute "is_implicit_VR".'
)
except AttributeError:
raise AttributeError(
'Transfer syntax of file object does not have '
'attribute "is_implicit_VR".'
)
self._filepointer = fp
self._filepath = None
elif isinstance(fp, (str, Path)):
self._filepath = Path(fp)
self._filepointer = None
else:
raise TypeError(
'Argument "filename" must either an open DICOM file object or '
'the path to a DICOM file stored on disk.'
)
# These attributes will be set by open()
self._metadata: Dataset = Dataset()
self._is_open = False
self._as_float = False
self._bytes_per_frame_uncompressed: int = -1
self._basic_offset_table: List[int] = []
self._first_frame_offset: int = -1
self._pixel_data_offset: int = -1
self._pixels_per_frame: int = -1
def _check_file_format(self, fp: DicomFileLike) -> Tuple[bool, bool]:
"""Check whether file object represents a DICOM Part 10 file.
Parameters
----------
fp: pydicom.filebase.DicomFileLike
DICOM file object
Returns
-------
is_little_endian: bool
Whether the data set is encoded in little endian transfer syntax
is_implicit_VR: bool
Whether value representations of data elements in the data set
are implicit
Raises
------
InvalidDicomError
If the file object does not represent a DICOM Part 10 file
"""
def is_main_tag(tag: BaseTag, VR: Optional[str], length: int) -> bool:
return tag >= 0x00040000
pos = fp.tell()
ds = read_partial(fp, stop_when=is_main_tag) # type: ignore
fp.seek(pos)
transfer_syntax_uid = UID(ds.file_meta.TransferSyntaxUID)
return (
transfer_syntax_uid.is_little_endian,
transfer_syntax_uid.is_implicit_VR,
)
def __enter__(self) -> '_ImageFileReader':
self.open()
return self
def __exit__(self, except_type, except_value, except_trace) -> None:
self._fp.close()
if except_value:
sys.stdout.write(
'Error while accessing file "{}":\n{}'.format(
self._filepath, str(except_value)
)
)
for tb in traceback.format_tb(except_trace):
sys.stdout.write(tb)
raise
@property
def _fp(self) -> DicomFileLike:
if self._filepointer is None:
raise IOError('File has not been opened for reading.')
return self._filepointer
def open(self) -> None:
"""Open file for reading.
Raises
------
FileNotFoundError
When file cannot be found
OSError
When file cannot be opened
IOError
When DICOM metadata cannot be read from file
ValueError
When DICOM dataset contained in file does not represent an image
Note
----
Reads the metadata of the DICOM Data Set contained in the file and
builds a Basic Offset Table to speed up subsequent frame-level access.
"""
# This method sets several attributes on the object, which cannot
# (or should not) be set in the constructor. Other methods assert that
# the file has been opened before they rely on these attributes.
import addrmodes
# TODO: Check that no addressing values larger than 1 byte exist
def rel_addr(value):
if value & 0b10000000:
value &= 0b1111111
value -= 128
return value
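# For example: rel_addr(0x05) == 5, rel_addr(0xFB) == -5 and
# rel_addr(0xFF) == -1 (two's-complement reading of the signed 8-bit
# branch offset).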
def advancePC(cpu, size):
cpu.registers['PC'] += size
def setN(cpu, value):
if value & (1 << 7) == 1 << 7:
cpu.setStatus(cpu.statusFlags['n'], 1)
else:
cpu.setStatus(cpu.statusFlags['n'], 0)
def setZ(cpu, value):
if value == 0:
cpu.setStatus(cpu.statusFlags['z'], 1)
else:
cpu.setStatus(cpu.statusFlags['z'], 0)
def setO(cpu, value):
cpu.setStatus(cpu.statusFlags['v'], value)
def setC(cpu, value):
cpu.setStatus(cpu.statusFlags['c'], value)
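# The ADC_* handlers below share one pattern: overflow (V) is set when the
# accumulator and operand agree in sign but the result's sign differs,
# carry (C) is set when the unsigned sum exceeds 0xFF, and the result is
# masked to a single byte before being stored back in A.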
def ADC_Immediate(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 0xFF)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Zero(cpu):
size = 2
nCycles = 3
address = addrmodes.Zero(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 0xFF)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Zero_X(cpu):
size = 2
nCycles = 4
address = addrmodes.Zero_X(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Absolute(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Absolute_X(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute_X(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Absolute_Y(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute_Y(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Indirect_X(cpu):
size = 2
nCycles = 6
address = addrmodes.Indirect_X(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def ADC_Indirect_Y(cpu):
size = 2
nCycles = 5
address = addrmodes.Indirect_Y(cpu)
value = cpu.readMemory(address)
carry = cpu.getStatus(cpu.statusFlags['c'])
tmp = value + cpu.registers['A'] + carry
setO(
cpu, not (((cpu.registers['A'] ^ value) & 0x80) != 0)
and (((cpu.registers['A'] ^ tmp) & 0x80)))
setC(cpu, tmp > 255)
setN(cpu, tmp)
setZ(cpu, tmp & 0xFF)
cpu.registers['A'] = (tmp & 0xFF)
advancePC(cpu, size)
return nCycles
def AND_Immediate(cpu):
size = 2
nCycles = 2
value = cpu.registers['A'] & cpu.readMemory(cpu.registers['PC'] + 1)
cpu.registers['A'] = value
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
return nCycles
def AND_Zero(cpu):
size = 2
nCycles = 3
address = addrmodes.Zero(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Zero_X(cpu):
size = 2
nCycles = 4
address = addrmodes.Zero_X(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Absolute(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Absolute_X(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute_X(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Absolute_Y(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute_Y(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Indirect_X(cpu):
size = 2
nCycles = 6
address = addrmodes.Indirect_X(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def AND_Indirect_Y(cpu):
size = 2
nCycles = 5
address = addrmodes.Indirect_Y(cpu)
value = cpu.readMemory(address)
cpu.registers['A'] &= value
advancePC(cpu, size)
setN(cpu, cpu.registers['A'])
setZ(cpu, cpu.registers['A'])
return nCycles
def ASL_Accumulator(cpu):
size = 1
nCycles = 2
value = cpu.registers['A']
setC(cpu, value & 0x80)
value <<= 1
value &= 0xFF
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
cpu.registers['A'] = value
return nCycles
def ASL_Zero(cpu):
size = 2
nCycles = 5
address = addrmodes.Zero(cpu)
value = cpu.readMemory(address)
setC(cpu, value & 0x80)
value <<= 1
value &= 0xFF
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
cpu.writeMemory(address, value)
return nCycles
def ASL_Zero_X(cpu):
size = 2
nCycles = 6
address = addrmodes.Zero_X(cpu)
value = cpu.readMemory(address)
setC(cpu, value & 0x80)
value <<= 1
value &= 0xFF
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
cpu.writeMemory(address, value)
return nCycles
def ASL_Absolute(cpu):
size = 3
nCycles = 6
address = addrmodes.Absolute(cpu)
value = cpu.readMemory(address)
setC(cpu, value & 0x80)
value <<= 1
value &= 0xFF
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
cpu.writeMemory(address, value)
return nCycles
def ASL_Absolute_X(cpu):
size = 3
nCycles = 7
address = addrmodes.Absolute_X(cpu)
value = cpu.readMemory(address)
setC(cpu, value & 0x80)
value <<= 1
value &= 0xFF
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value)
cpu.writeMemory(address, value)
return nCycles
def BCC_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if not cpu.getStatus(cpu.statusFlags['c']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
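# The remaining branch handlers (BCS, BEQ, BMI, BNE, BPL, BVC, BVS) follow
# the same cycle rule as BCC above: 2 base cycles, +1 when the branch is
# taken, and +1 more when the target falls on a different 256-byte page.
# (BPL expresses the same rule with nested ifs.)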
def BCS_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if cpu.getStatus(cpu.statusFlags['c']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BEQ_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if cpu.getStatus(cpu.statusFlags['z']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BIT_Zero(cpu):
size = 2
nCycles = 3
address = addrmodes.Zero(cpu)
value = cpu.readMemory(address)
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value & cpu.registers['A'])
setO(cpu, (value >> 6) & 1)
return nCycles
def BIT_Absolute(cpu):
size = 3
nCycles = 4
address = addrmodes.Absolute(cpu)
value = cpu.readMemory(address)
advancePC(cpu, size)
setN(cpu, value)
setZ(cpu, value & cpu.registers['A'])
setO(cpu, (value >> 6) & 1)
return nCycles
def BMI_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if cpu.getStatus(cpu.statusFlags['n']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BNE_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if not cpu.getStatus(cpu.statusFlags['z']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BPL_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if not cpu.getStatus(cpu.statusFlags['n']):
nCycles += 1
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 1
#cpu.registers['PC'] += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BRK_Implied(cpu):
size = 1
nCycles = 7
cpu.registers['PC'] += 2
cpu.pushStack((cpu.registers['PC'] >> 8) & 0xFF)
cpu.pushStack(cpu.registers['PC'] & 0xFF)
cpu.setStatus(cpu.statusFlags['b'], 1)
cpu.pushStack(cpu.registers['P'])
cpu.setStatus(cpu.statusFlags['i'], 1)
cpu.InterruptRequest = 0x49
advancePC(cpu, size)
return nCycles
def BVC_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
if not cpu.getStatus(cpu.statusFlags['v']):
if (cpu.registers['PC'] & 0xFF00) != (
(cpu.registers['PC'] + value) & 0xFF00):
nCycles += 2
else:
nCycles += 1
advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
def BVS_Relative(cpu):
size = 2
nCycles = 2
value = cpu.readMemory(cpu.registers['PC'] + 1)
value = rel_addr(value)
# (completion assumed, mirroring BVC_Relative above but taking the branch
# when the overflow flag IS set)
if cpu.getStatus(cpu.statusFlags['v']):
    if (cpu.registers['PC'] & 0xFF00) != (
            (cpu.registers['PC'] + value) & 0xFF00):
        nCycles += 2
    else:
        nCycles += 1
    advancePC(cpu, value)
advancePC(cpu, size)
return nCycles
# Refer to the following link for PyQt documentation:
# http://pyqt.sourceforge.net/Docs/PyQt4/classes.html
# Written for AMIS-30543 driver.
'''
At 60 RPM with an input of 200 steps in 1/1 mode, the motor takes 1 second to complete the move.
At 120 RPM with an input of 200 steps in 1/2 mode, the motor takes 1 second to complete the move.
'''
import sys
import RNELBanner_rc
from PyQt4 import QtCore, QtGui
from PyQt4.QtGui import QPalette
from serial import *
#imports for multithreading
from threading import Thread, Event
import multiprocessing
import math
import socket
import os
import signal
import RPi.GPIO as GPIO
##### imports for picamera
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2
import numpy as np
from scipy.misc import imresize
import globalvars
import struct
import Queue
##### end
minHeight, maxHeight = 0, 200000
#global doorclose
#doorclose = True
try:
arduino = Serial('/dev/ttyACM2', 9600)
print("successfully connected to orig arduino!")
except:
arduino = None
pass
try:
arduinoservodoor = Serial('/dev/ttyACM1', 9600)
print("successfully connected to servo arduino!")
except:
arduinoservodoor = None
pass
try:
arduinoCapSense = Serial('/dev/ttyACM0', 115200)
print("successfully connected to cap sensor arduino!")
except:
arduinoCapSense = None
pass
#doorclose = True
target = open("/home/kemerelab/Desktop/CapSenseData.out", 'w')
class Capacitance(QtCore.QThread):
# def __init__(self, threadID, name):
# Thread.__init__(self)
# self.threadID = threadID
# self.name = capacitiveSensorThread
def run(self):
while globalvars.quitThread == False:
if (arduinoCapSense is not None):
arduinoCapSense.flushInput()
capdatatotal = arduinoCapSense.readline()
target.write(capdatatotal)
self.emit(QtCore.SIGNAL('CAP'), capdatatotal)
time.sleep(1.5)
class Ui_Form(QtGui.QWidget):
def __init__(self):
super(Ui_Form, self).__init__()
self.currentPosition = 0
self.level_position = {1:0, 2:1000, 3:2000}
# self.doorclose = True
self.setupUi()
def closeEvent(self, event):
target.close()
globalvars.quitThread = True
time.sleep(1)
t2.join()
print "User has clicked the red x on the main window"
event.accept()
def setupUi(self):
#self.threadclass = level()
#self.threadclass.start()
#self.connect(self, QtCore.SIGNAL('LEVEL'), self.threadclass)
self.setWindowTitle("RNEL Elevator Controller")
rowSpacer = QtGui.QSpacerItem(1, 20)
columnSpacer = QtGui.QSpacerItem(50, 1)
# Highlight input that is currently selected
self.setFocusPolicy(QtCore.Qt.ClickFocus)
# Create UI elements
label_banner = QtGui.QLabel()
label_banner.setText("")
label_banner.setPixmap(QtGui.QPixmap(":/RNELicon/RNELBanner.png"))
font = QtGui.QFont("Helvetica", 12, 75)
font.setBold(True)
label_motorState = QtGui.QLabel("Stepper Motor Parameters")
label_motorState.setFont(font)
label_time = QtGui.QLabel("Time Between Levels (seconds):")
label_steps = QtGui.QLabel("Distance (in):")
label_wheeldiameter = QtGui.QLabel("Wheel Diameter (in)")
label_direction = QtGui.QLabel("Direction:")
label_mode = QtGui.QLabel("Mode:")
#label_torque = QtGui.QLabel("Torque:")
label_capacitance = QtGui.QLabel("Capacitance: ") #LOOK HERE
label_capacitance.setFont(font)
self.capacitance = QtGui.QLCDNumber(self) #LOOK HERE
self.capacitance.setFont(font)
palette = QPalette()
# palette.setBrush(QtGui.QPalette.Light, QtCore.Qt.black)
brush = QtGui.QBrush(QtGui.QColor(0,0,0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
self.capacitance.setPalette(palette)
self.capacitance.setDigitCount(8)
self.threadclass = Capacitance()
self.threadclass.start()
self.connect(self.threadclass, QtCore.SIGNAL('CAP'), self.updateCapacitance)
self.capacitance.display(0) # just so something is there
self.lineEdit_time = QtGui.QLineEdit()
self.lineEdit_time.setMaximumSize(QtCore.QSize(100, 30))
self.lineEdit_time.setText("0")
self.lineEdit_distance = QtGui.QLineEdit()
self.lineEdit_distance.setMaximumSize(QtCore.QSize(100, 30))
self.lineEdit_distance.setText("0")
self.lineEdit_wheeldiameter = QtGui.QLineEdit()
self.lineEdit_wheeldiameter.setText("1")
self.comboBox_direction = QtGui.QComboBox()
self.comboBox_direction.addItems(["Up", "Down"])
self.comboBox_mode = QtGui.QComboBox()
self.comboBox_mode.addItems(["1/1", "1/2", "1/4", "1/8", "1/16", "1/32", "1/64", "1/128"])
self.comboBox_mode.setCurrentIndex(0)
#self.comboBox_torque = QtGui.QComboBox()
#self.comboBox_torque.addItems(["10%", "20%", "30%", "40%", "50%", "60%", "70%", "80%", "90%"])
#self.comboBox_torque.setCurrentIndex(4)
#Preset Levels >>> assign each to a 12" distance later
self.preset_checkbox = QtGui.QCheckBox("Use preset elevator levels")
self.preset_checkbox.setCheckState(False)
self.preset_checkbox.setTristate(False)
label_level = QtGui.QLabel("Level:")
self.comboBox_level = QtGui.QComboBox()
self.comboBox_level.addItems(["1", "2", "3"])
self.comboBox_level.setEnabled(False)
label_assign = QtGui.QLabel("Assign position to level?")
self.btn_assign = QtGui.QPushButton("Assign")
self.btn_assign.setEnabled(False)
self.btn_run = QtGui.QPushButton("Run")
self.btn_doorstat = QtGui.QPushButton("Open/Close")
self.progress_bar = QtGui.QProgressBar()
label_history = QtGui.QLabel("Command History")
label_history.setFont(font)
self.command_history = QtGui.QPlainTextEdit()
self.command_history.setMaximumSize(QtCore.QSize(1000, 500))
self.command_history.setReadOnly(True)
self.command_history.appendPlainText("Note: The speed will be scaled according to the microstepping mode.")
self.command_history.appendPlainText("Note: The time and distance inputs must be positive integers. Numbers that are not integers will be rounded down.")
self.command_history.appendPlainText("")
font = QtGui.QFont("Helvetica", 12)
label_instructions = QtGui.QLabel("Please visit the following site for instructions:")
label_instructions.setFont(font)
label_website = QtGui.QLabel()
label_website.setFont(font)
label_website.setText("<a href=\"https://github.com/kemerelab/Elevator/\">Elevator Maze</a>")
label_website.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
label_website.setOpenExternalLinks(True)
# Format UI elements
formLayout = QtGui.QFormLayout()
formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
formLayout.setLabelAlignment(QtCore.Qt.AlignLeft)
formLayout.addRow(label_time, self.lineEdit_time)
formLayout.addRow(label_steps, self.lineEdit_distance)
formLayout.addRow(label_direction, self.comboBox_direction)
formLayout.addRow(label_mode, self.comboBox_mode)
#formLayout.addRow(label_torque, self.comboBox_torque)
formLayout.addRow(label_wheeldiameter, self.lineEdit_wheeldiameter)
formLayout2 = QtGui.QFormLayout()
formLayout2.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
formLayout2.setLabelAlignment(QtCore.Qt.AlignLeft)
formLayout2.addRow(label_level, self.comboBox_level)
formLayout2.addRow(label_capacitance, self.capacitance) #LOOK HERE
verticalLayout = QtGui.QVBoxLayout()
verticalLayout.addWidget(self.preset_checkbox)
verticalLayout.addLayout(formLayout2)
verticalLayout.addStretch()
verticalLayout.addWidget(label_assign)
verticalLayout.addWidget(self.btn_assign, 0, QtCore.Qt.AlignHCenter)
horizontalLayout = QtGui.QHBoxLayout()
horizontalLayout.addLayout(formLayout)
horizontalLayout.addSpacerItem(columnSpacer)
horizontalLayout.addLayout(verticalLayout)
verticalLayout2 = QtGui.QVBoxLayout(self)
verticalLayout2.setContentsMargins(30, 20, 30, 20)
verticalLayout2.setSizeConstraint(QtGui.QLayout.SetFixedSize)
verticalLayout2.addWidget(label_banner, 0, QtCore.Qt.AlignHCenter)
verticalLayout2.addSpacerItem(rowSpacer)
verticalLayout2.addWidget(label_motorState)
verticalLayout2.addLayout(horizontalLayout)
verticalLayout2.addWidget(self.btn_run, 0, QtCore.Qt.AlignHCenter)
verticalLayout2.addWidget(self.btn_doorstat, 0, QtCore.Qt.AlignRight)
verticalLayout2.addWidget(self.progress_bar)
verticalLayout2.addSpacerItem(rowSpacer)
formLayout3 = QtGui.QFormLayout()
verticalLayout2.addLayout(formLayout3)
formLayout3.addRow(label_capacitance, self.capacitance) #LOOK HERE
verticalLayout2.addWidget(label_history)
verticalLayout2.addWidget(self.command_history)
verticalLayout2.addSpacerItem(rowSpacer)
verticalLayout2.addWidget(label_instructions)
verticalLayout2.addWidget(label_website)
self.btn_run.clicked.connect(self.collectMotorData)
self.btn_doorstat.clicked.connect(self.sendServoData)
self.preset_checkbox.stateChanged.connect(self.updateUI)
self.comboBox_level.currentIndexChanged.connect(self.updateUI)
self.btn_assign.clicked.connect(self.assignPosition)
self.btn_assign.clicked.connect(self.updateUI)
def updateCapacitance(self, val):
self.capacitance.display(val)
def calculateSteps (self):
"""
Distance to be traveled divided by the circumference of the wheel (distance
covered in one rotation) and multiplied by 200 (number of steps in one
rotation of the stepper) in order to find number of steps that need to be
taken to reach desired location.
"""
print(float(self.lineEdit_distance.text()))
self.steppersteps = (float(self.lineEdit_distance.text()) / (math.pi * float(self.lineEdit_wheeldiameter.text()))) * (200 * float(self.comboBox_mode.currentText()[2:]))
print(self.steppersteps)
return self.steppersteps
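# Illustrative example (assumed inputs): a 12 in move on a 1 in wheel in
# 1/1 mode gives (12 / (pi * 1)) * (200 * 1) ~= 764 steps.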
def delay(self):
"""
Total time for a level change divided by 2 times the number of steps
required to get the desired distance change (to account for rests between
steps) and the mode (to account for microstepping).
"""
#Delay times are approximations as the steps will be rounded later
self.delaytime = float(self.lineEdit_time.text()) / (2 * float(self.steppersteps))
self.delaytime *= 1000
print("delay:", self.delaytime)
return self.delaytime
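# Continuing the illustrative example above: a 10 s move over ~764 steps
# gives 10 / (2 * 764) * 1000 ~= 6.5 ms between step toggles.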
def reqRPM(self):
"""
Compute the speed value sent to the Arduino: the number of full motor
rotations for this move (microsteps divided by microsteps per rotation,
i.e. 200 times the mode denominator). Values outside [0, 200] are invalid.
"""
reqspeed = (self.steppersteps)/(200 * int(self.comboBox_mode.currentText()[2:]))
reqspeed_valid = True
if reqspeed > 200 or reqspeed < 0:
reqspeed_valid = False
print(reqspeed)
return reqspeed, reqspeed_valid
def collectMotorData(self):
#speed, speed_valid = QtCore.QString.toFloat(self.lineEdit_speed.text())
#torque = str(self.comboBox_torque.currentText()[0])
# If preset levels are used, calculate steps and direction
#### NEEDS TO BE REDONE********
# checkState() == 2 (Qt.Checked) means preset levels are in use
if self.preset_checkbox.checkState() == 2:
steps_valid = True
steps, direction = self.level_calculations()
else:
#steps, steps_valid = QtCore.QString.toFloat(self.lineEdit_distance.text())
steps = int(self.calculateSteps())
direction = str(self.comboBox_direction.currentText())
if direction == "Up" and steps >= maxHeight - self.currentPosition:
steps_valid = True
elif direction == 'Down' and steps <= self.currentPosition - minHeight:
steps_valid = True
else:
steps_valid = False
speed, speed_valid = self.reqRPM()
stepdelay = self.delay()
#if speed_valid == False or steps_valid == False:
# self.errorMessage(0)
#if speed == 0 and speed_valid == True:
# self.errorMessage(1)
#if speed > 200 or speed < 0:
# self.errorMessage(2)
#self.level_position(2)
# speed = 0
# steps = 0
#speed = int(speed)
#if(speed != 0):
#if steps == 0 and steps_valid == True:
#if self.preset_checkbox.checkState() == 0:
# self.errorMessage(3)
#if self.preset_checkbox.checkState() == 2:
# self.errorMessage(6)
#if steps < 0:
# self.errorMessage(8)
# steps = 0
#steps = int(steps)
# Do not step past the top and bottom of the maze
if direction == "Up" and speed != 0:
if steps > maxHeight - self.currentPosition:
self.errorMessage(4)
steps = maxHeight - self.currentPosition
self.currentPosition += int(steps)
if direction == "Down" and speed != 0:
if steps > self.currentPosition - minHeight:
self.errorMessage(5)
steps = self.currentPosition - minHeight
self.currentPosition -= int(steps)
# Note: calculateSteps() already scales the step count by the reciprocal of
# the microstepping mode, so no extra scaling is applied here, and position
# tracking above uses the same (already scaled) step count.
#print (mode)
self.sendMotorData(str(speed), str(int(self.steppersteps)), self.comboBox_mode.currentText()[2:], direction, str(stepdelay))
def sendMotorData(self, speed, steps, mode, direction, delay):
self.btn_run.setEnabled(False)
#while len(speed) < 4:
# speed = "0" + speed
#while len(steps) < 8:
# steps = "0" + steps
#while len(mode) < 3:
# mode = "0" + mode
#while len(delay) < 6:
# delay = "0" + delay
data = str('x'+speed+'x'+steps+'x'+mode+'x'+delay+'x'+direction)
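# Wire format (as built above): a leading 'x', then 'x'-separated fields:
# speed, step count, microstepping-mode denominator, inter-step delay in
# milliseconds, and direction ('Up' or 'Down').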
print("stepper data:", data)
self.command_history.appendPlainText(data)
self.command_history.appendPlainText("Estimated time required (seconds): " + self.lineEdit_time.text())
# self.sendServoData()
try:
arduino.write(data)
self.update_progress(int(self.steppersteps))
#arduino.write("On")
# In a separate thread, block new inputs until Arduino is ready
#if self.steps != 0:
#self.progress_bar.setRange(0, self.steps)
#self.motor_progress = update_thread(self.steps)
#self.motor_progress.start()
#self.motor_progress.bar_value.connect(self.update_progress)
#else:
#self.update_progress(0)
except:
self.command_history.appendPlainText("The Arduino is not connected.")
self.btn_run.setEnabled(True)
#### I think hall effect sensor reading should go here
self.command_history.appendPlainText("Current position: " + str(self.currentPosition))
self.command_history.appendPlainText("")
def sendServoData(self):
if globalvars.doorclose:
try:
arduinoservodoor.write("0")
globalvars.doorclose = not globalvars.doorclose
if(globalvars.doorclose):
print("Door Closed")
else:
print("Door Open")
if(arduinoCapSense is not None):
target.write("door open\n")
except:
self.command_history.appendPlainText("Error reading from servo arduino\n")
else:
try:
arduinoservodoor.write("90")
globalvars.doorclose = not globalvars.doorclose
if(globalvars.doorclose):
print("Door Closed")
else:
print("Door Open")
'''
try:
#while True:
if(arduinoCapSense is not None):
arduinoCapSense.flushInput()
capdata = arduinoCapSense.readline()
target.write(capdata)
target.write("door closed\n")
print capdata
except:
self.command_history.appendPlainText("Error writing to capacitive sensor arduino\n")
'''
except:
self.command_history.appendPlainText("Error writing to servo arduino\n")
def level_calculations(self):
# This method is called in collectMotorData() and updateUI()
current_level = int(self.comboBox_level.currentText())
#self.emit(QtCore.SIGNAL('LEVEL'), current_level)
steps = abs(self.currentPosition - self.level_position[current_level])
if self.currentPosition > self.level_position[current_level]:
    direction = "Down"
else:
    direction = "Up"
# (completion assumed: collectMotorData() unpacks `steps, direction` from
# this method, so both are returned here)
return steps, direction
1
"""
left_corner = ((self[1][0], self[0][0]), 'L')
for s in self.separatrix_diagram(side=True):
if left_corner in s:
return len(s)//2 - 1
def attached_in_degree(self):
r"""
Returns the degree of the singularity at the right of the interval.
OUTPUT:
-- a positive integer
EXAMPLES::
sage: p1 = iet.Permutation('a b c d e f g','d c g f e b a')
sage: p2 = iet.Permutation('a b c d e f g','e d c g f b a')
sage: p1.attached_in_degree()
1
sage: p2.attached_in_degree()
3
"""
right_corner = ((self[0][-1], self[1][-1]), 'R')
for s in self.separatrix_diagram(side=True):
if right_corner in s:
return len(s)//2 - 1
def attached_type(self):
r"""
Return the singularity degree attached on the left and the right.
OUTPUT:
``([degree], angle_parity)`` -- if the same singularity is attached on the left and the right
``([left_degree, right_degree], 0)`` -- the degrees at the left and the right, which belong to different singularities
EXAMPLES:
With two intervals::
sage: p = iet.Permutation('a b','b a')
sage: p.attached_type()
([0], 1)
With three intervals::
sage: p = iet.Permutation('a b c','b c a')
sage: p.attached_type()
([0], 1)
sage: p = iet.Permutation('a b c','c a b')
sage: p.attached_type()
([0], 1)
sage: p = iet.Permutation('a b c','c b a')
sage: p.attached_type()
([0, 0], 0)
With four intervals::
sage: p = iet.Permutation('1 2 3 4','4 3 2 1')
sage: p.attached_type()
([2], 0)
"""
left_corner = ((self[1][0], self[0][0]), 'L')
right_corner = ((self[0][-1], self[1][-1]), 'R')
l = self.separatrix_diagram(side=True)
for s in l:
if left_corner in s and right_corner in s:
i1 = s.index(left_corner)
i2 = s.index(right_corner)
return ([len(s)//2 - 1], ((i2-i1+1)//2) % 2)
elif left_corner in s:
left_degree = len(s)//2 - 1
elif right_corner in s:
right_degree = len(s)//2 - 1
return ([left_degree,right_degree], 0)
def separatrix_diagram(self,side=False):
r"""
Returns the separatrix diagram of the permutation.
INPUT:
- ``side`` - boolean
OUTPUT:
-- a list of lists
EXAMPLES::
sage: iet.Permutation([0, 1], [1, 0]).separatrix_diagram()
[[(1, 0), (1, 0)]]
::
sage: iet.Permutation('a b c d','d c b a').separatrix_diagram()
[[('d', 'a'), 'b', 'c', ('d', 'a'), 'b', 'c']]
"""
separatrices = range(len(self)) # bottom intervals
labels = self[1] # their labels
singularities = []
twin = self._twin
n = len(self)-1
while separatrices != []:
start = separatrices.pop(0)
separatrix = start
if side:
singularity = [(labels[start],'L')]
else:
singularity = [labels[start]]
while True:
if separatrix == 0:
separatrix = twin[0][0]
if side:
a = singularity.pop()[0]
else:
a = singularity.pop()
if side:
singularity.append(((a,labels[separatrix]), 'L'))
else:
singularity.append((a,labels[separatrix]))
if separatrix == start:
singularities.append(singularity)
break
del separatrices[separatrices.index(separatrix)]
else:
separatrix -= 1
if side:
singularity.append((labels[separatrix],'R'))
else:
singularity.append(labels[separatrix])
if separatrix == twin[0][n] :
separatrix = n
if side:
a = singularity.pop()[0]
else:
a = singularity.pop()
if side:
singularity.append(((a,labels[separatrix]),'R'))
else:
singularity.append((a,labels[separatrix]))
separatrix = twin[0][twin[1][separatrix]+1]
if separatrix == start:
singularities.append(singularity)
break
elif separatrix != twin[0][0]:
del separatrices[separatrices.index(separatrix)]
if side:
singularity.append((labels[separatrix],'L'))
else:
singularity.append(labels[separatrix])
return singularities
def stratum(self, marked_separatrix='no'):
r"""
Returns the strata in which any suspension of this permutation lives.
OUTPUT:
- a stratum of Abelian differentials
EXAMPLES::
sage: p = iet.Permutation('a b c', 'c b a')
sage: print p.stratum()
H(0, 0)
sage: p = iet.Permutation('a b c d', 'd a b c')
sage: print p.stratum()
H(0, 0, 0)
sage: p = iet.Permutation(range(9), [8,5,2,7,4,1,6,3,0])
sage: print p.stratum()
H(1, 1, 1, 1)
You can specify that you want to attach the singularity on the left (or
on the right) with the option marked_separatrix::
sage: a = 'a b c d e f g h i j'
sage: b3 = 'd c g f e j i h b a'
sage: b2 = 'd c e g f j i h b a'
sage: b1 = 'e d c g f h j i b a'
sage: p3 = iet.Permutation(a, b3)
sage: p3.stratum()
H(3, 2, 1)
sage: p3.stratum(marked_separatrix='out')
H^out(3, 2, 1)
sage: p2 = iet.Permutation(a, b2)
sage: p2.stratum()
H(3, 2, 1)
sage: p2.stratum(marked_separatrix='out')
H^out(2, 3, 1)
sage: p1 = iet.Permutation(a, b1)
sage: p1.stratum()
H(3, 2, 1)
sage: p1.stratum(marked_separatrix='out')
H^out(1, 3, 2)
AUTHORS:
- <NAME> (2008-12-20)
"""
from sage.dynamics.flat_surfaces.strata import AbelianStratum
if not self.is_irreducible():
return [x.stratum(marked_separatrix) for x in self.decompose()]
if len(self) == 1:
return AbelianStratum([])
singularities = [len(x)//2 - 1 for x in self.separatrix_diagram()]
return AbelianStratum(singularities,marked_separatrix=marked_separatrix)
def genus(self) :
r"""
Returns the genus corresponding to any suspension of the permutation.
OUTPUT:
-- a positive integer
EXAMPLES::
sage: p = iet.Permutation('a b c', 'c b a')
sage: p.genus()
1
::
sage: p = iet.Permutation('a b c d','d c b a')
sage: p.genus()
2
REFERENCES:
Veech
"""
return self.stratum().genus()
def arf_invariant(self):
r"""
Returns the Arf invariant of the suspension of self.
OUTPUT:
integer -- 0 or 1
EXAMPLES:
Permutations from the odd and even component of H(2,2,2)::
sage: a = range(10)
sage: b1 = [3,2,4,6,5,7,9,8,1,0]
sage: b0 = [6,5,4,3,2,7,9,8,1,0]
sage: p1 = iet.Permutation(a,b1)
sage: print p1.arf_invariant()
1
sage: p0 = iet.Permutation(a,b0)
sage: print p0.arf_invariant()
0
Permutations from the odd and even component of H(4,4)::
sage: a = range(11)
sage: b1 = [3,2,5,4,6,8,7,10,9,1,0]
sage: b0 = [5,4,3,2,6,8,7,10,9,1,0]
sage: p1 = iet.Permutation(a,b1)
sage: print p1.arf_invariant()
1
sage: p0 = iet.Permutation(a,b0)
sage: print p0.arf_invariant()
0
REFERENCES:
[Jo80] <NAME>, "Spin structures and quadratic forms on surfaces", J.
London Math. Soc (2), 22, 1980, 365-373
[KoZo03] <NAME>, <NAME> "Connected components of the moduli
spaces of Abelian differentials with prescribed singularities",
Inventiones Mathematicae, 153, 2003, 631-678
"""
M = self.intersection_matrix()
F, C = M.symplectic_form()
g = F.rank()/2
n = F.ncols()
s = 0
for i in range(g):
a = C.row(i)
a_indices = []
for k in xrange(n):
if a[k] != 0: a_indices.append(k)
t_a = len(a_indices) % 2
for j1 in xrange(len(a_indices)):
for j2 in xrange(j1+1,len(a_indices)):
t_a = (t_a + M[a_indices[j1], a_indices[j2]]) % 2
b = C.row(g+i)
b_indices = []
for k in xrange(n):
if b[k] != 0: b_indices.append(k)
t_b = len(b_indices) % 2
for j1 in xrange(len(b_indices)):
for j2 in xrange(j1+1,len(b_indices)):
t_b = (t_b + M[b_indices[j1],b_indices[j2]]) % 2
s = (s + t_a * t_b) % 2
return s
def connected_component(self,marked_separatrix='no'):
r"""
Returns a connected components of a stratum.
EXAMPLES:
Permutations from the stratum H(6)::
sage: a = range(8)
sage: b_hyp = [7,6,5,4,3,2,1,0]
sage: b_odd = [3,2,5,4,7,6,1,0]
sage: b_even = [5,4,3,2,7,6,1,0]
sage: p_hyp = iet.Permutation(a, b_hyp)
sage: p_odd = iet.Permutation(a, b_odd)
sage: p_even = iet.Permutation(a, b_even)
sage: print p_hyp.connected_component()
H_hyp(6)
sage: print p_odd.connected_component()
H_odd(6)
sage: print p_even.connected_component()
H_even(6)
Permutations from the stratum H(4,4)::
sage: a = range(11)
sage: b_hyp = [10,9,8,7,6,5,4,3,2,1,0]
sage: b_odd = [3,2,5,4,6,8,7,10,9,1,0]
sage: b_even = [5,4,3,2,6,8,7,10,9,1,0]
sage: p_hyp = iet.Permutation(a,b_hyp)
sage: p_odd = iet.Permutation(a,b_odd)
sage: p_even = iet.Permutation(a,b_even)
sage: p_hyp.stratum() == AbelianStratum(4,4)
True
sage: print p_hyp.connected_component()
H_hyp(4, 4)
sage: p_odd.stratum() == AbelianStratum(4,4)
True
sage: print p_odd.connected_component()
H_odd(4, 4)
sage: p_even.stratum() == AbelianStratum(4,4)
True
sage: print p_even.connected_component()
H_even(4, 4)
As for stratum you can specify that you want to attach the singularity
on the left of the interval using the option marked_separatrix::
sage: a = [1,2,3,4,5,6,7,8,9]
sage: b4_odd = [4,3,6,5,7,9,8,2,1]
sage: b4_even = [6,5,4,3,7,9,8,2,1]
sage: b2_odd = [4,3,5,7,6,9,8,2,1]
sage: b2_even = [7,6,5,4,3,9,8,2,1]
sage: p4_odd = iet.Permutation(a,b4_odd)
sage: p4_even = iet.Permutation(a,b4_even)
sage: p2_odd = iet.Permutation(a,b2_odd)
sage: p2_even = iet.Permutation(a,b2_even)
sage: p4_odd.connected_component(marked_separatrix='out')
H_odd^out(4, 2)
sage: p4_even.connected_component(marked_separatrix='out')
H_even^out(4, 2)
sage: p2_odd.connected_component(marked_separatrix='out')
H_odd^out(2, 4)
sage: p2_even.connected_component(marked_separatrix='out')
H_even^out(2, 4)
sage: p2_odd.connected_component() == p4_odd.connected_component()
True
sage: p2_odd.connected_component('out') == p4_odd.connected_component('out')
False
"""
from sage.dynamics.flat_surfaces.strata import (HypCCA,
OddCCA, EvenCCA)
if not self.is_irreducible():
return [x.connected_component(marked_separatrix) for x in self.decompose()]
stratum = self.stratum(marked_separatrix=marked_separatrix)
cc = stratum._cc
if len(cc) == 1:
return stratum.connected_components()[0]
if HypCCA in cc:
if self.is_hyperelliptic():
return HypCCA(stratum)
else:
cc = cc[1:]
if len(cc) == 1:
return cc[0](stratum)
else:
spin = self.arf_invariant()
if spin == 0:
return EvenCCA(stratum)
else:
return OddCCA(stratum)
def order_of_rauzy_action(self, winner, side=None):
r"""
Returns the order of the action of a Rauzy move.
INPUT:
- ``winner`` - string ``'top'`` or ``'bottom'``
- ``side`` - string ``'left'`` or ``'right'``
OUTPUT:
An integer corresponding to the order of the Rauzy action.
EXAMPLES::
sage: p = iet.Permutation('a b c d','d a c b')
sage: p.order_of_rauzy_action('top', 'right')
3
sage: p.order_of_rauzy_action('bottom', 'right')
2
sage: p.order_of_rauzy_action('top', 'left')
1
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 1,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 2,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
})
]
user_features = UserFeatures(user, tweets)
self.assertAlmostEqual(user_features[USER_FEATURES_INDEX["number_of_user_mentions_mean"]], np.mean([3, 2, 1]))
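        # A minimal sketch (an assumption about the implementation, not the
        # library's actual code) of the feature under test: count the "@"
        # mentions in each tweet text, then aggregate with numpy:
        #
        #     import re
        #     mentions = [len(re.findall(r"@\w+", t.text)) for t in tweets]
        #     feature = np.mean(mentions)  # here: np.mean([3, 2, 1])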
def test_number_of_user_mentions_std_nan(self):
user_dic = {
"id": 1,
"name": "<NAME>",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
user_features = UserFeatures(user, [])
self.assertTrue(np.isnan(user_features[USER_FEATURES_INDEX["number_of_user_mentions_std"]]))
def test_number_of_user_mentions_std(self):
user_dic = {
"id": 1,
"name": "Test Account",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
tweets = [
Status.parse(api=None, json={
"id": 0,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2 @user3",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 1,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 2,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
})
]
user_features = UserFeatures(user, tweets)
self.assertAlmostEqual(user_features[USER_FEATURES_INDEX["number_of_user_mentions_std"]], np.std([3, 2, 1]))
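        # Note: np.std defaults to the population standard deviation
        # (ddof=0), so the feature implementation is expected to follow the
        # same convention for this assertion to hold.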
def test_number_of_user_mentions_max_nan(self):
user_dic = {
"id": 1,
"name": "<NAME>",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
user_features = UserFeatures(user, [])
self.assertTrue(np.isnan(user_features[USER_FEATURES_INDEX["number_of_user_mentions_max"]]))
def test_number_of_user_mentions_max(self):
user_dic = {
"id": 1,
"name": "<NAME>",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
tweets = [
Status.parse(api=None, json={
"id": 0,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2 @user3",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 1,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 2,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
})
]
user_features = UserFeatures(user, tweets)
self.assertAlmostEqual(user_features[USER_FEATURES_INDEX["number_of_user_mentions_max"]], 3.0)
def test_number_of_user_mentions_min_nan(self):
user_dic = {
"id": 1,
"name": "Test Account",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
user_features = UserFeatures(user, [])
self.assertTrue(np.isnan(user_features[USER_FEATURES_INDEX["number_of_user_mentions_min"]]))
def test_number_of_user_mentions_min(self):
user_dic = {
"id": 1,
"name": "Test Account",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
tweets = [
Status.parse(api=None, json={
"id": 0,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2 @user3",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 1,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user1 @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 2,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text. @user2",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
})
]
user_features = UserFeatures(user, tweets)
self.assertAlmostEqual(user_features[USER_FEATURES_INDEX["number_of_user_mentions_min"]], 1.0)
def test_number_of_sentences_mean_nan(self):
user_dic = {
"id": 1,
"name": "Test Account",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
user_features = UserFeatures(user, [])
self.assertTrue(np.isnan(user_features[USER_FEATURES_INDEX["number_of_sentences_mean"]]))
def test_number_of_sentences_mean(self):
user_dic = {
"id": 1,
"name": "Test Account",
"screen_name": "test_account",
"location": "",
"url": None,
"expanded_url": None,
"description": "",
"protected": False,
"verified": False,
"followers_count": 10,
"friends_count": 15,
"listed_count": 2,
"favourites_count": 50,
"statuses_count": 9,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(TWITTER_DATE_TIME_FORMAT),
"profile_image_url_https": "",
"default_profile": True,
"default_profile_image": True,
"withheld_in_countries": "",
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}
user = User.parse(api=None, json=user_dic)
tweets = [
Status.parse(api=None, json={
"id": 0,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text.",
"coordinates": None,
"place": None,
"in_reply_to_status_id": None,
"in_reply_to_user_id": None,
"quoted_status_id": None,
"retweet_count": 2,
"favorite_count": 3,
"lang": "en",
"withheld_copyright": False,
"withheld_in_countries": None,
"entities": {
"urls": []
},
"source": "Twitter Web App",
"videos": 0,
"photos": 0,
"gifs": 0,
"fetch_date": datetime.strptime("2000-01-01 23:59:59", DATE_TIME_FORMAT)
}),
Status.parse(api=None, json={
"id": 1,
"user_id": 1,
"created_at": datetime.strptime("2000-01-01 00:00:00", DATE_TIME_FORMAT).strftime(
TWITTER_DATE_TIME_FORMAT),
"text": "This is just a simple test tweet text.",
"coordinates": None,
"place": | |
# File: src/kanapy/cli.py
# -*- coding: utf-8 -*-
import os, sys
import shutil, json
import click
from kanapy.util import ROOT_DIR, MAIN_DIR
from kanapy.input_output import particleStatGenerator, particleCreator, RVEcreator, \
write_position_weights, write_output_stat, plot_output_stats, \
extract_volume_sharedGBarea, read_dump, export2abaqus
from kanapy.packing import packingRoutine
from kanapy.voxelization import voxelizationRoutine
from kanapy.analyze_texture import textureReduction
from kanapy.smoothingGB import smoothingRoutine
from numpy import asarray
@click.group()
@click.pass_context
def main(ctx):
pass
@main.command(name='autoComplete')
@click.pass_context
def autocomplete(ctx):
""" Kanapy bash auto completion."""
click.echo('')
os.system("echo '# For KANAPY bash autocompletion' >> ~/.bashrc")
os.system("echo '. {}' >> ~/.bashrc".format(ROOT_DIR+'/kanapy-complete.sh'))
@main.command(name='runTests')
@click.pass_context
def tests(ctx):
""" Runs unittests built within kanapy."""
click.echo('')
os.system("pytest {0}/tests/ -v".format(MAIN_DIR))
click.echo('')
@main.command(name='genDocs')
@click.pass_context
def docs(ctx):
""" Generates a HTML-based reference documentation."""
click.echo('')
os.system("make -C {0}/docs/ clean && make -C {0}/docs/ html".format(MAIN_DIR))
click.echo('')
click.echo("The HTML documentation can be found at '/path/to/your/kanapy/docs/index.html'")
click.echo('')
@main.command(name='genStats')
@click.option('-f', default=None, help='Input statistics file name in the current directory.')
@click.pass_context
def createStats(ctx, f: str):
""" Generates particle statistics based on the data provided in the input file."""
    if f is None:
        click.echo('')
        click.echo('Please provide the name of the input file available in the current directory', err=True)
        click.echo('For more info, run: kanapy genStats --help\n', err=True)
sys.exit(0)
else:
cwd = os.getcwd()
if not os.path.exists(cwd + '/{}'.format(f)):
click.echo('')
click.echo("Mentioned file: '{}' does not exist in the current working directory!\n".format(f), err=True)
sys.exit(0)
# Open the user input statistics file and read the data
try:
with open(cwd + '/' + f) as json_file:
stats_dict = json.load(json_file)
except FileNotFoundError:
            print('Input file not found, make sure "{}" is present in the working directory!'.format(f))
raise FileNotFoundError
particleStatGenerator(stats_dict, save_files=True)
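# Example invocation (assuming a stat_input.json file in the current
# working directory):
#
#   $ kanapy genStats -f stat_input.json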
@main.command(name='genRVE')
@click.option('-f', default=None, help='Input statistics file name in the current directory.')
@click.pass_context
def createRVE(ctx, f: str):
""" Creates RVE based on the data provided in the input file."""
    if f is None:
        click.echo('')
        click.echo('Please provide the name of the input file available in the current directory', err=True)
        click.echo('For more info, run: kanapy genRVE --help\n', err=True)
sys.exit(0)
else:
cwd = os.getcwd()
if not os.path.exists(cwd + '/{}'.format(f)):
click.echo('')
click.echo("Mentioned file: '{}' does not exist in the current working directory!\n".format(f), err=True)
sys.exit(0)
# Open the user input statistics file and read the data
try:
with open(cwd + '/' + f) as json_file:
stats_dict = json.load(json_file)
except FileNotFoundError:
            print('Input file not found, make sure "{}" is present in the working directory!'.format(f))
raise FileNotFoundError
RVEcreator(stats_dict, save_files=True)
@main.command(name='readGrains')
@click.option('-f', default=None, help='Input file name in the current directory.')
@click.option('-periodic', default='True', help='RVE periodicity status.')
@click.option('-units', default='mm', help='Output unit format.')
@click.pass_context
def readGrains(ctx, f: str, periodic: str, units: str):
''' Generates particles based on the grain data provided in the input file.'''
    if f is None:
        click.echo('')
        click.echo('Please provide the name of the input file available in the current directory', err=True)
        click.echo('For more info, run: kanapy readGrains --help\n', err=True)
sys.exit(0)
    elif ((periodic != 'True') and (periodic != 'False')):
        click.echo('')
        click.echo('Invalid entry! Run: kanapy readGrains again', err=True)
        click.echo('For more info, run: kanapy readGrains --help\n', err=True)
        sys.exit(0)
    elif ((units != 'mm') and (units != 'um')):
        click.echo('')
        click.echo('Invalid entry! Run: kanapy readGrains again', err=True)
        click.echo('For more info, run: kanapy readGrains --help\n', err=True)
sys.exit(0)
else:
cwd = os.getcwd()
if not os.path.exists(cwd + '/{}'.format(f)):
click.echo('')
click.echo("Mentioned file: '{}' does not exist in the current working directory!\n".format(f), err=True)
sys.exit(0)
particleCreator(cwd + '/' + f, periodic=periodic, units=units)
@main.command()
@click.pass_context
def pack(ctx):
""" Packs the particles into a simulation box."""
try:
cwd = os.getcwd()
json_dir = cwd + '/json_files' # Folder to store the json files
try:
# Load the dictionaries from json files
with open(json_dir + '/particle_data.json') as json_file:
particle_data = json.load(json_file)
with open(json_dir + '/RVE_data.json') as json_file:
RVE_data = json.load(json_file)
with open(json_dir + '/simulation_data.json') as json_file:
simulation_data = json.load(json_file)
        except FileNotFoundError:
raise FileNotFoundError('Json files not found, make sure "RVE creator" command is executed first!')
packingRoutine(particle_data, RVE_data, simulation_data, save_files=True)
except KeyboardInterrupt:
sys.exit(0)
@main.command()
@click.pass_context
def voxelize(ctx):
""" Generates the RVE by assigning voxels to grains."""
try:
cwd = os.getcwd()
json_dir = cwd + '/json_files' # Folder to store the json files
try:
with open(json_dir + '/RVE_data.json') as json_file:
RVE_data = json.load(json_file)
with open(json_dir + '/particle_data.json') as json_file:
particle_data = json.load(json_file)
except FileNotFoundError:
            raise FileNotFoundError('Json files not found, make sure "RVE_data.json" and "particle_data.json" files exist!')
# Read the required dump file
if particle_data['Type'] == 'Equiaxed':
filename = cwd + '/dump_files/particle.{0}.dump'.format(800)
else:
filename = cwd + '/dump_files/particle.{0}.dump'.format(500)
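        # The dump file read here is presumably the one written by the packing
        # stage ("kanapy pack"); the hard-coded time step in the file name
        # (800 for equiaxed particles, 500 otherwise) must match what the
        # packing routine wrote out.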
sim_box, Ellipsoids = read_dump(filename)
voxelizationRoutine(particle_data, RVE_data, Ellipsoids, sim_box, save_files=True)
except KeyboardInterrupt:
sys.exit(0)
@main.command()
@click.pass_context
def smoothen(ctx):
""" Generates smoothed grain boundary from a voxelated mesh."""
try:
print('')
print('Starting Grain boundary smoothing')
cwd = os.getcwd()
json_dir = cwd + '/json_files'
try:
with open(json_dir + '/nodes_v.csv', 'r') as f:
hh = f.read()
hx = hh.split('\n')
hs = []
for hy in hx[0:-1]:
hs.append(hy.split(', '))
nodes_v = asarray(hs, dtype=float)
with open(json_dir + '/elmtDict.json') as json_file:
elmtDict = {int(k):v for k,v in json.load(json_file).items()}
with open(json_dir + '/elmtSetDict.json') as json_file:
elmtSetDict = {int(k):v for k,v in json.load(json_file).items()}
except FileNotFoundError:
            print('Files not found, make sure "nodes_v.csv", "elmtDict.json" and "elmtSetDict.json" files exist!')
raise FileNotFoundError
smoothingRoutine(nodes_v, elmtDict, elmtSetDict, save_files=True)
except KeyboardInterrupt:
sys.exit(0)
return
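# The CSV-parsing block above recurs in several commands below; a hedged
# refactoring sketch (assuming the files stay comma+space delimited):
#
#     def _read_nodes_csv(path):
#         with open(path, 'r') as fp:
#             rows = [line.split(', ') for line in fp.read().split('\n')[:-1]]
#         return asarray(rows, dtype=float)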
@main.command(name='abaqusOutput')
@click.pass_context
def abaqusoutput(ctx):
""" Writes out the Abaqus (.inp) file for the voxelized RVE."""
try:
print('\nStarting Abaqus export for voxelized structure')
cwd = os.getcwd()
json_dir = cwd + '/json_files' # Folder to store the json files
try:
with open(json_dir + '/simulation_data.json') as json_file:
simulation_data = json.load(json_file)
with open(json_dir + '/nodes_v.csv', 'r') as f:
hh = f.read()
hx = hh.split('\n')
hs = []
for hy in hx[0:-1]:
hs.append(hy.split(', '))
nodes_v = asarray(hs, dtype=float)
with open(json_dir + '/elmtDict.json') as json_file:
elmtDict = json.load(json_file)
with open(json_dir + '/elmtSetDict.json') as json_file:
elmtSetDict = json.load(json_file)
except FileNotFoundError:
raise FileNotFoundError('Json file not found, make sure "kanapy voxelize" command is executed first!')
name = cwd + '/kanapy_{0}grains_voxels.inp'.format(len(elmtSetDict))
if os.path.exists(name):
os.remove(name) # remove old file if it exists
export2abaqus(nodes_v, name, simulation_data, elmtSetDict, elmtDict, grain_facesDict=None)
except KeyboardInterrupt:
sys.exit(0)
@main.command(name='abaqusOutput-smooth')
@click.pass_context
def abaqusoutput_smooth(ctx):
""" Writes out the Abaqus (.inp) file for the smoothened RVE."""
try:
print('\nStarting Abaqus export for smoothened structure')
cwd = os.getcwd()
json_dir = cwd + '/json_files' # Folder to store the json files
try:
with open(json_dir + '/simulation_data.json') as json_file:
simulation_data = json.load(json_file)
with open(json_dir + '/nodes_s.csv', 'r') as f:
hh = f.read()
hx = hh.split('\n')
hs = []
for hy in hx[0:-1]:
hs.append(hy.split(', '))
nodes_v = asarray(hs, dtype=float)
with open(json_dir + '/elmtDict.json') as json_file:
elmtDict = json.load(json_file)
with open(json_dir + '/elmtSetDict.json') as json_file:
elmtSetDict = json.load(json_file)
with open(json_dir + '/grain_facesDict.json') as json_file:
grain_facesDict = json.load(json_file)
except FileNotFoundError:
raise FileNotFoundError('Json file not found, make sure "kanapy smoothen" command is executed first!')
name = cwd + '/kanapy_{0}grains_smooth.inp'.format(len(elmtSetDict))
if os.path.exists(name):
os.remove(name) # remove old file if it exists
export2abaqus(nodes_v, name, simulation_data, elmtSetDict, elmtDict, grain_facesDict=grain_facesDict)
except KeyboardInterrupt:
sys.exit(0)
@main.command(name='outputStats')
@click.pass_context
def outputstats(ctx):
""" Writes out the particle- and grain diameter attributes for statistical comparison. Final RVE
grain volumes and shared grain boundary surface areas info are written out as well.
.. note:: Particle information is read from (.json) file generated by :meth:`kanapy.input_output.particleStatGenerator`.
RVE grain information is read from the (.json) files generated by :meth:`kanapy.voxelization.voxelizationRoutine`.
"""
cwd = os.getcwd()
json_dir = cwd + '/json_files' # Folder to store the json files
try:
with open(json_dir + '/nodes_v.csv', 'r') as f:
hh = f.read()
hx = hh.split('\n')
hs = []
for hy in hx[0:-1]:
hs.append(hy.split(', '))
nodes_v = asarray(hs, dtype=float)
with open(json_dir + '/elmtDict.json') as json_file:
inpDict = json.load(json_file)
elmtDict =dict([int(a), x] for a, x in inpDict.items())
with open(json_dir + '/elmtSetDict.json') as json_file:
inpDict = json.load(json_file)
elmtSetDict = dict([int(a), x] for a, x in inpDict.items())
with open(json_dir + '/particle_data.json') as json_file:
particle_data = json.load(json_file)
with open(json_dir + '/RVE_data.json') as json_file:
RVE_data = json.load(json_file)
with open(json_dir + '/simulation_data.json') as json_file:
simulation_data = json.load(json_file)
except FileNotFoundError:
        print('Json file not found, make sure "Input statistics, Packing, & Voxelization" commands are executed first!')
THREAD STOPPED ")
return
def process_cmd(line):
"""
Process CAPI commands and send through socket if necessary
Parameters
----------
line : str
CAPI command followed by parameters with "," as delimiter
Returns
-------
none
Examples
--------
process_cmd(ca_get_version)
process_cmd(sniffer_control_filter_capture,infile,_521-step1,
outfile,521-step1_A,srcmac,00:11:22:33:44:55,
destmac,55:44:33:22:11:00)
"""
global conntable, threadCount, waitsocks_par, runningPhase, testRunning, streamInfoArray, resultPrinted
    global retValueTable, RTPCount, multicast, ifCondBit, iDNB, iINV, socktimeout
    line = line.rstrip()
    parts = line.split('#')
    recv_id = {}
    try:
        if parts[0] == '':
            return
        command = parts[0].split('!')
if command[0].lower() == "else":
if int(ifCondBit):
ifCondBit = 0
else:
ifCondBit = 1
return
if command[0].lower() == "endif":
ifCondBit = 1
return
if command[0].lower() == "if":
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
if command[3] in retValueTable:
command[3] = retValueTable[command[3]]
            if (command[2]).lower() == "=":
                if (command[1]).lower() == (command[3]).lower():
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            elif (command[2]).lower() == ">":
                if long(command[1]) > long(command[3]):
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            elif (command[2]).lower() == "<":
                if long(command[1]) < long(command[3]):
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            elif (command[2]).lower() == ">=":
                if long(command[1]) >= long(command[3]):
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            elif (command[2]).lower() == "<=":
                if long(command[1]) <= long(command[3]):
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            elif (command[2]).lower() == "<>":
                if (command[1]).lower() != (command[3]).lower():
                    ifCondBit = 1
                else:
                    ifCondBit = 0
            return
        if int(ifCondBit) == 0:
return
if command[0].lower() == "_dnb_":
iDNB = 1
return
if command[0].lower() == "_inv":
iINV = 1
return
if command[0].lower() == "inv_":
iINV = 0
return
if command[0].lower() == "mexpr":
if command[1] not in retValueTable:
return
if command[3] in retValueTable:
command[3] = retValueTable[command[3]]
if command[2] == "%":
retValueTable[command[1]] = (int(retValueTable[command[1]]) * int(command[3])) / 100
return
if command[0].lower() == "extract_p2p_ssid":
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
p2p_ssid = command[1].split(' ')
if len(p2p_ssid) > 1:
retValueTable.setdefault("$P2P_SSID", "%s" % p2p_ssid[1])
else:
logging.error("Invalid P2P Group ID")
return
if command[0].lower() == "calculate_ext_listen_values":
if command[1] not in retValueTable or command[2] not in retValueTable:
wfa_sys_exit("%s or %s not available" % (command[1], command[2]))
command[1] = retValueTable[command[1]]
command[2] = retValueTable[command[2]]
retValueTable.setdefault("$PROBE_REQ_INTERVAL", "%s" % (int(command[2]) / 2))
retValueTable.setdefault("$PROBE_REQ_COUNT", "%s" % (int(command[1]) / (int(command[2]) / 2)))
return
if command[0].lower() == "get_rnd_ip_address":
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
if command[2] in retValueTable:
command[2] = retValueTable[command[2]]
ip1 = command[1].split(".")
ip2 = command[2].split(".")
if (int(ip2[3]) + 1) != int(ip1[3]):
rnd_ip = ("%s.%s.%s.%s" % (ip2[0], ip2[1], ip2[2], int(ip2[3]) + 1))
else:
rnd_ip = ("%s.%s.%s.%s" % (ip2[0], ip2[1], ip2[2], int(ip2[3]) + 2))
retValueTable.setdefault(command[3], "%s" % rnd_ip)
return
if command[0].lower() == 'ucc_form_device_discovery_frame':
iCn = 0
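            # NOTE: iCn is never incremented inside this loop, so the sanity
            # check below can never fire as written.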
for c in command:
if iCn > 1 and c in command:
wfa_sys_exit("Invalid UCC command")
#command[1] Frame command[2] GOUT Device Address command[3] group ID command[4] Injector source Address command[5] Testbed Client address
f = command[1].split('*')
iCn = 0
#Hex SSID
SSID = retValueTable[command[3]].split(" ")[1]
SSIDLength = len(SSID)
SSIDLen1 = hex(int(SSIDLength) + 22).split("0x")[1]
SSIDLen2 = "%s 00" % hex(int(SSIDLength + 6)).split("0x")[1]
if int(len(SSIDLen2)) < 5:
SSIDLen2 = "0%s" % SSIDLen2
hexSSID = ""
for s in SSID:
h = hex(ord(s)).split("0x")[1]
hexSSID = hexSSID + h
logging.debug("hexSSID = %s hexLength %s" % (hexSSID, SSIDLength))
FrameData = "%s%s%s%s%s%s%s%s%s%s%s%s" % (f[0],
retValueTable[command[2]],
retValueTable[command[4]],
retValueTable[command[2]],
f[3],
SSIDLen1,
f[4],
retValueTable[command[5]],
f[5],
SSIDLen2,
retValueTable[command[2]],
hexSSID)
logging.debug(FrameData)
retValueTable.setdefault("$INJECT_FRAME_DATA", FrameData)
if command[0].lower() == 'addstaversioninfo':
vInfo = command[1].split(",")
i = 0
if len(vInfo) < 5:
logging.info("Incorrect version format")
return
if vInfo[0] not in retValueTable:
logging.debug("Unknown Component[1] %s", vInfo[0])
return
if retValueTable[vInfo[0]] not in conntable:
if retValueTable[retValueTable[vInfo[0]]] not in conntable:
logging.debug("Unknown Component[3] %s", vInfo[0])
return
#print vInfo
print len(retValueTable)
for c in vInfo:
if c in retValueTable:
vInfo[i] = retValueTable[c]
if vInfo[i] in DisplayNameTable:
vInfo[i] = DisplayNameTable[vInfo[i]]
i = i + 1
XLogger.AddTestbedDevice(vInfo[1], vInfo[2], vInfo[3], vInfo[4])
logging.debug(vInfo)
return
if command[0].lower() == 'adduccscriptversion':
XLogger.AddWTSComponent("UCC", VERSION, command[1])
if command[0].lower() == 'addwtscompversioninfo' or command[0].lower() == 'adddutversioninfo':
vInfo = command[1].split(",")
i = 0
if len(vInfo) < 5:
logging.info("Incorrect version format...")
return
if vInfo[0] in retValueTable:
vInfo[0] = retValueTable[vInfo[0]]
#print vInfo
print len(retValueTable)
for c in vInfo:
if c in retValueTable:
vInfo[i] = retValueTable[c]
if vInfo[i] in DisplayNameTable:
vInfo[i] = DisplayNameTable[vInfo[i]]
i = i + 1
if command[0].lower() == 'adddutversioninfo':
XLogger.AddDUTInfo(vInfo[1], vInfo[2], vInfo[3], vInfo[4])
logging.debug("DUT INFO [%s][%s][%s][%s]" % (vInfo[1], vInfo[2], vInfo[3], vInfo[4]))
else:
logging.debug("WTS Comp[%s][%s][%s][%s]" % (vInfo[1], vInfo[2], vInfo[3], vInfo[4]))
XLogger.AddWTSComponent(vInfo[0], vInfo[1], "%s:%s:%s" % (vInfo[2], vInfo[3], vInfo[4]))
logging.debug(vInfo)
return
if re.search("STA", command[0]):
if command[0] in retValueTable:
command[0] = retValueTable[command[0]]
else:
return
if command[0].lower() == 'exit':
set_color(FOREGROUND_CYAN | FOREGROUND_INTENSITY)
wfa_sys_exit("Exiting - %s" % command[1])
if command[0].lower() == 'pause':
set_color(FOREGROUND_YELLOW | FOREGROUND_INTENSITY)
logging.info("Exeuction Paused - %s \n Press any key to continue..." % command[1])
sys.stdin.read(1)
set_color(FOREGROUND_INTENSITY)
return
if command[0].lower() == 'sleep':
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
time.sleep(float(command[1]))
return
if command[0].lower() == 'userinput':
set_color(FOREGROUND_YELLOW | FOREGROUND_INTENSITY)
logging.info("[USER INPUT REQUIRED]")
udata = raw_input(command[1])
if command[2] in retValueTable:
retValueTable[command[2]] = udata
else:
retValueTable.setdefault(command[2], udata)
set_color(FOREGROUND_INTENSITY)
return
if command[0].lower() == 'userinput_ifnowts':
if retValueTable["$WTS_ControlAgent_Support"] == "0":
set_color(FOREGROUND_YELLOW | FOREGROUND_INTENSITY)
logging.info("[USER INPUT REQUIRED]")
udata = raw_input(command[1])
if command[2] in retValueTable:
retValueTable[command[2]] = udata
else:
retValueTable.setdefault(command[2], udata)
set_color(FOREGROUND_INTENSITY)
return
if command[0].lower() == 'ifnowts':
if retValueTable["$WTS_ControlAgent_Support"] == "0":
set_color(FOREGROUND_YELLOW | FOREGROUND_INTENSITY)
if len(command) > 3 and command[2] in retValueTable:
s = "- %s" % retValueTable[command[2]]
else:
s = ""
logging.info("%s %s\n Press any key to continue after done" % (command[1], s))
sys.stdin.read(1)
set_color(FOREGROUND_INTENSITY)
return
if command[0] == 'wfa_control_agent' or command[0] == 'wfa_control_agent_dut':
if retValueTable["$WTS_ControlAgent_Support"] == "0":
return
if command[0].lower() == 'getuccsystemtime':
timeStr = time.strftime("%H-%M-%S-%m-%d-%Y", time.localtime())
logging.debug("\n Reading UCC System time %s" % timeStr)
t = timeStr.split("-")
retValueTable.setdefault("$month", t[3])
retValueTable.setdefault("$date", t[4])
retValueTable.setdefault("$year", t[5])
retValueTable.setdefault("$hours", t[0])
retValueTable.setdefault("$minutes", t[1])
retValueTable.setdefault("$seconds", t[2])
logging.debug("""\n UCC System Time -
Month:%s:
Date:%s:
Year:%s:
Hours:%s:
Minutes:%s:
Seconds:%s:""" %
(retValueTable["$month"],
retValueTable["$date"],
retValueTable["$year"],
retValueTable["$hours"],
retValueTable["$minutes"],
retValueTable["$seconds"]))
return
if command[0].lower() == 'r_info':
rdata = "-"
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
if len(command) > 1:
rdata = command[2]
resultPrinted = 1
set_test_result(command[1], rdata, "-")
XLogger.setTestResult(command[1], rdata)
wfa_sys_exit_0()
return
if command[0].lower() == 'info':
set_color(FOREGROUND_CYAN | FOREGROUND_INTENSITY)
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
logging.info("\n %7s ~~~~~ %s ~~~~~ \n" %("", command[1]))
set_color(FOREGROUND_INTENSITY)
return
if re.search('esultIBSS', command[0]):
time.sleep(5)
printStreamResults()
process_passFailIBSS(command[1])
return
elif re.search('define', command[0]):
logging.debug("..Define %s = %s"%(command[1], command[2]))
if command[1] in retValueTable:
if command[2] in retValueTable:
command[2] = retValueTable[command[2]]
retValueTable[command[1]] = command[2]
else:
if command[2] in retValueTable:
command[2] = retValueTable[command[2]]
retValueTable.setdefault(command[1], command[2])
return
elif command[0].lower() == 'echo':
if command[1] in retValueTable:
logging.info("%s=%s" % (command[1], retValueTable[command[1]]))
else:
logging.info("Unknown variable %s" %command[1])
return
elif command[0].lower() == 'echo_ifnowts' and retValueTable["$WTS_ControlAgent_Support"] == "0":
if command[1] in retValueTable:
logging.info("-%s=%s-" % (command[1], retValueTable[command[1]]))
else:
logging.info("%s" % command[1])
return
elif command[0].lower() == 'storethroughput':
cmd = command[2].split(",")
logging.debug("Storing the Throughput(Mbps) value of stream %s[%s %s] in %s duration=%s p=%s", cmd[0], cmd[3], "%", command[1], retValueTable[cmd[2]], cmd[1])
P1 = -1
for p in streamRecvResultArray:
if p.streamID == retValueTable[cmd[0]] and int(p.phase) == int(cmd[1]):
P1 = p.rxBytes
P1 = int(int(P1) / 100) * int(cmd[3])
P1 = ((float(P1) * 8))/(1000000 * int(retValueTable[cmd[2]]))
break
logging.info("Storing %s = %s [Mbps]", command[1], P1)
if command[1] in retValueTable:
retValueTable[command[1]] = P1
else:
retValueTable.setdefault(command[1], P1)
return
elif command[0].lower() == 'resultwmm':
time.sleep(5)
printStreamResults()
process_passFailWMM(command[1])
return
elif command[0].lower() == 'resultwmm_1':
time.sleep(5)
printStreamResults()
process_passFailWMM_1(command[1])
return
elif re.search('CheckThroughput', command[0]):
time.sleep(5)
printStreamResults()
process_CheckThroughput(command[1], 0)
return
elif re.search('CheckMCSThroughput', command[0]):
time.sleep(5)
printStreamResults()
process_CheckMCSThroughput(command[1])
return
elif re.search('CheckDT4Result', command[0]):
time.sleep(5)
printStreamResults()
process_CheckDT4(command[1])
return
elif re.search('TransactionThroughput', command[0]):
time.sleep(5)
printStreamResults()
process_CheckThroughput(command[1], 1)
return
elif re.search('esultCheck', command[0]):
time.sleep(5)
process_ResultCheck(command[1])
return
logging.debug("COMMAND - to %s" % command[0])
if command[0] == 'wfa_test_commands':
if command[1] in retValueTable:
command[1] = retValueTable[command[1]]
process_cmdfile("%s%s"%(uccPath, command[1]))
return
if command[0] == 'Phase':
RTPCount = 1
time.sleep(3)
logging.debug("Starting Phase - %s ..." % command[1])
runningPhase = command[1]
threadCount = 0
testRunning = 0
time.sleep(2)
return
if len(command) < 3:
logging.error('Incorrect format of line - %s', line)
return
ret_data_def = command[2]
ret_data_def_type = ret_data_def.split(',')
logging.debug("Command Return Type = %s" % (ret_data_def_type[0].lower()))
if ret_data_def_type[0] == 'STREAMID' or ret_data_def_type[0] == 'INTERFACEID' or ret_data_def_type[0] == 'PING':
ret_data_idx = ret_data_def_type[1]
elif ret_data_def_type[0] == 'RECV_ID':
recv_value = ret_data_def_type[1].split(' ')
i = 0
for r in recv_value:
recv_id[i] = r
i += 1
logging.debug('RECV ID %s', recv_id)
elif ret_data_def_type[0] == 'FILENAME':
upload_file_desc = open(ret_data_def_type[1], 'a')
""" Standalone webinterface for Openstack Swift. """
# -*- coding: utf-8 -*-
import os, random
import time
import urlparse
import hmac, ast
import traceback
from hashlib import sha1
from keystoneauth1.identity import v3
from keystoneauth1 import session
from swiftclient import client
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext
from django.contrib import messages
from django.conf import settings
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from django.core.files.base import ContentFile
from django.http import HttpResponse
from wsgiref.util import FileWrapper
from swiftbrowser.forms import CreateContainerForm, PseudoFolderForm, \
LoginForm, AddACLForm
from swiftbrowser.utils import replace_hyphens, prefix_list, \
pseudofolder_object_list, get_temp_key, get_base_url, get_temp_url
import swiftbrowser
from swiftbrowser.enc_swiftclient_API import EncSwiftclientAPI
from swiftbrowser.config import *
def login(request):
""" Tries to login user and sets session data """
request.session.flush()
form = LoginForm(request.POST or None)
if form.is_valid():
username = form.cleaned_data['username']
password = form.cleaned_data['password']
try:
auth_version = settings.SWIFT_AUTH_VERSION or 1
user = username[username.find(':')+1:]
project = username[:username.find(':')]
auth_obj = v3.Password(auth_url=AUTH_URL, username=user,password=password, project_domain_name="Default", user_domain_name="Default", project_name=project)
sess = session.Session(auth=auth_obj)
auth_token = sess.get_token()
project_id = sess.get_project_id()
storage_url = '%s/AUTH_%s' %(STORAGE_URL, str(sess.get_project_id()))
#(storage_url, auth_token) = client.get_auth(
# settings.SWIFT_AUTH_URL, username, password,
# auth_version=auth_version)
#(meta_storage_url, meta_auth_token) = client.get_auth(
# settings.SWIFT_AUTH_URL, META_TENANT + username[username.find(':'):], password,
# auth_version=auth_version)
request.session['auth_token'] = auth_token
#request.session['meta_auth_token'] = meta_auth_token
request.session['storage_url'] = storage_url
#request.session['meta_storage_url'] = meta_storage_url
request.session['project_id'] = project_id
request.session['username'] = user
request.session['name'] = user
return redirect(containerview)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Login failed."))
except Exception,err:
print Exception, err
return render_to_response('login.html', {'form': form, },
context_instance=RequestContext(request))
def containerview(request):
""" Returns a list of all containers in current account. """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
if not storage_url or not auth_token:
return redirect(login)
try:
conn = EncSwiftclientAPI(auth_token, project_id)
account_stat, containers = conn.get_account()
except client.ClientException as exc:
traceback.print_exc()
if exc.http_status == 403:
account_stat = {}
containers = []
base_url = get_base_url(request)
msg = 'Container listing failed. You can manually choose a known '
msg += 'container by appending the name to the URL, for example: '
msg += '<a href="%s/objects/containername">' % base_url
msg += '%s/objects/containername</a>' % base_url
messages.add_message(request, messages.ERROR, msg)
else:
return redirect(login)
except Exception:
traceback.print_exc()
return redirect(login)
account_stat = replace_hyphens(account_stat)
return render_to_response('containerview.html', {
'account_stat': account_stat,
'containers': containers,
'session': request.session,
}, context_instance=RequestContext(request))
def create_container(request):
""" Creates a container (empty object of type application/directory) """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
form = CreateContainerForm(request.POST or None)
if form.is_valid():
container = form.cleaned_data['containername']
try:
conn = EncSwiftclientAPI(auth_token, project_id)
conn.put_container(container)
messages.add_message(request, messages.INFO,
_("Container created."))
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
except Exception:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Something goes wrong. Try again!"))
return redirect(containerview)
return render_to_response('create_container.html', {
'session': request.session,
}, context_instance=RequestContext(request))
def delete_container(request, container):
""" Deletes a container """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
try:
conn = EncSwiftclientAPI(auth_token, project_id)
conn.delete_container(container)
messages.add_message(request, messages.INFO, _("Container deleted."))
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied. If there are some files in %s, before delete them!" % container))
return redirect(containerview)
def objectview(request, container, prefix=None):
""" Returns list of all objects in current container. """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
try:
conn = EncSwiftclientAPI(auth_token, project_id)
meta, objects = conn.get_container(container, delimiter='/',
prefix=prefix)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
return redirect(containerview)
prefixes = prefix_list(prefix)
pseudofolders, objs = pseudofolder_object_list(objects, prefix)
base_url = get_base_url(request)
account = storage_url.split('/')[-1]
read_acl = meta.get('x-container-read', '').split(',')
public = False
required_acl = ['.r:*', '.rlistings']
if [x for x in read_acl if x in required_acl]:
public = True
return render_to_response("objectview.html", {
'container': container,
'objects': objs,
'folders': pseudofolders,
'session': request.session,
'prefix': prefix,
'prefixes': prefixes,
'base_url': base_url,
'account': account,
'public': public},
context_instance=RequestContext(request))
def upload(request, container, prefix=None):
""" Display upload form using swift formpost """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
redirect_url = get_base_url(request)
redirect_url += reverse('objectview', kwargs={'container': container, })
swift_url = storage_url + '/' + container + '/'
if prefix:
swift_url += prefix
redirect_url += prefix
url_parts = urlparse.urlparse(swift_url)
path = url_parts.path
max_file_size = 5 * 1024 * 1024 * 1024
max_file_count = 1
expires = int(time.time() + 15 * 60)
key = get_temp_key(storage_url, auth_token)
if not key:
messages.add_message(request, messages.ERROR, _("Access denied."))
if prefix:
return redirect(objectview, container=container, prefix=prefix)
else:
return redirect(objectview, container=container)
hmac_body = '%s\n%s\n%s\n%s\n%s' % (
path, redirect_url, max_file_size, max_file_count, expires)
signature = hmac.new(str(key), str(hmac_body), sha1).hexdigest()
prefixes = prefix_list(prefix)
return render_to_response('upload_form.html', {
'swift_url': swift_url,
'redirect_url': redirect_url,
'max_file_size': max_file_size,
'max_file_count': max_file_count,
'expires': expires,
'signature': signature,
'container': container,
'prefix': prefix,
'prefixes': prefixes,
'session': request.session,
}, context_instance=RequestContext(request))
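# The signature above follows Swift's formpost middleware: an HMAC-SHA1 over
# "path\nredirect\nmax_file_size\nmax_file_count\nexpires", keyed with the
# account's temp-URL key. A minimal verification sketch (hypothetical values,
# mirroring the code above):
#
#     import hmac
#     from hashlib import sha1
#     hmac_body = '%s\n%s\n%s\n%s\n%s' % (path, redirect_url,
#                                         max_file_size, max_file_count, expires)
#     assert hmac.new(str(key), str(hmac_body), sha1).hexdigest() == signature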
def put_object(request,container,prefix=None):
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
redirect_url = get_base_url(request)
redirect_url += reverse('objectview', kwargs={'container': container, })
data = request.FILES.get('file1','')
if data == '':
messages.add_message(request, messages.ERROR, _("Wrong File. Select it again."))
if prefix:
return redirect(upload, container=container, prefix=prefix)
else:
return redirect(upload, container=container)
if prefix:
obj_url = prefix + data.name
redirect_url += prefix
else: obj_url = data.name
try:
conn = EncSwiftclientAPI(auth_token, project_id)
conn.put_object(container,obj_url,data.read())
messages.add_message(request, messages.INFO, _("Object uploaded."))
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Upload denied."))
return redirect(containerview)
except Exception:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Something goes wrong. Try again!"))
return redirect(containerview)
return redirect(redirect_url)
def download_dec(request, container, objectname):
""" Download an object (clear content) from Swift """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
redirect_url = get_base_url(request)
redirect_url += reverse('objectview', kwargs={'container': container, })
try:
conn = EncSwiftclientAPI(auth_token, project_id)
header, obj = conn.get_object(container, objectname)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
return redirect(objectview, container=container)
except Exception:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Something goes wrong. Try again!"))
return redirect(objectview, container=container)
response = HttpResponse(FileWrapper(ContentFile(obj)), content_type=header['content-type'])
response['Content-Disposition'] = 'attachment; filename=%s' % objectname.split('/')[-1]
return response
def download_enc(request, container, objectname):
""" Download an object (encrypted content) from Swift """
'''storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
redirect_url = get_base_url(request)
redirect_url += reverse('objectview', kwargs={'container': container, })
try:
#conn = EncSwiftclientAPI(username[username.find(':')+1:],auth_token,meta_auth_token,storage_url,meta_storage_url)
header, obj = client.get_object(storage_url, auth_token, container, objectname)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
return redirect(objectview, container=container)
except Exception:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Something goes wrong. Try again!"))
return redirect(objectview, container=container)
response = HttpResponse(FileWrapper(ContentFile(obj)), content_type=header['content-type'])
response['Content-Disposition'] = 'attachment; filename=%s_enc' % objectname.split('/')[-1]
return response'''
def delete_object(request, container, objectname):
""" Deletes an object """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
try:
conn = EncSwiftclientAPI(auth_token, project_id)
if objectname[-1] == '/':
meta, objects = conn.get_container(container, marker=None, delimiter='/', prefix=objectname)
if len(objects) > 1:
# pseudofolder is not empty
raise client.ClientException("It's not an empty pseudofolder")
conn.delete_object(container, objectname)
messages.add_message(request, messages.INFO, _("Object deleted."))
except client.ClientException:
traceback.print_exc()
if objectname[-1] == '/':
messages.add_message(request, messages.ERROR, _("It's not an empty pseudofolder. First, delete all the included objects!"))
else: messages.add_message(request, messages.ERROR, _("Access denied."))
if objectname[-1] == '/': # deleting a pseudofolder, move one level up
objectname = objectname[:-1]
prefix = '/'.join(objectname.split('/')[:-1])
if prefix:
prefix += '/'
return redirect(objectview, container=container, prefix=prefix)
def toggle_public(request, container):
""" Sets/unsets '.r:*,.rlistings' container read ACL """
storage_url = request.session.get('storage_url', '')
#meta_storage_url = request.session.get('meta_storage_url', '')
auth_token = request.session.get('auth_token', '')
#meta_auth_token = request.session.get('meta_auth_token', '')
username = request.session.get('username', '')
project_id = request.session.get('project_id','')
try:
conn = EncSwiftclientAPI(auth_token, project_id)
meta = conn.head_container(container)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
return redirect(containerview)
read_acl = meta.get('x-container-read', '')
    if '.rlistings' in read_acl and '.r:*' in read_acl:
read_acl = read_acl.replace('.r:*', '')
read_acl = read_acl.replace('.rlistings', '')
read_acl = read_acl.replace(',,', ',')
else:
read_acl += '.r:*,.rlistings'
headers = {'x-container-read': read_acl, }
try:
conn = EncSwiftclientAPI(auth_token, project_id)
conn.post_container(container, headers)
except client.ClientException:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Access denied."))
except Exception:
traceback.print_exc()
messages.add_message(request, messages.ERROR, _("Something goes wrong. Try again!"))
return redirect(objectview, container=container)
def public_objectview(request, account, container, prefix=None):
messages.add_message(request, messages.ERROR, _("Function 'public_objectview' not implemented."))
'''""" Returns list of all objects in current container. """
| |
import struct
import os
from .base import FirmwareObject, RawObject, BaseObject, AutoRawObject
from .uefi import FirmwareVolume
from .utils import print_error, dump_data, sguid, green, blue
PFS_GUIDS = {
"FIRMWARE_VOLUMES": "7ec6c2b0-3fe3-42a0-a316-22dd0517c1e8",
"INTEL_ME": "7439ed9e-70d3-4b65-9e33-1963a7ad3c37",
"BIOS_ROMS_1": "08e56a30-62ed-41c6-9240-b7455ee653d7",
"BIOS_ROMS_2": "492261e4-0659-424c-82b6-73274389e7a7"
}
def _discover_volumes(data):
# Assume a series of firmware volumes
volumes = []
fv_offset = 0
while fv_offset < len(data):
fv = FirmwareVolume(data[fv_offset:], hex(fv_offset))
if not fv.valid_header:
break
if not fv.process():
return False
volumes.append(fv)
fv_offset += fv.size
return volumes
class PFRegion(FirmwareObject):
def __init__(self, header, data):
self.offset, self.size, self.address, self.name_offset = struct.unpack(
'<IIQI', header[:20])
self.header = header
self.data = data[:self.size]
@property
def objects(self):
return [self.obj]
def process(self):
self.obj = AutoRawObject(self.data)
if not self.obj.process():
self.obj = RawObject(self.data)
else:
self.obj = self.obj.object
return True
def showinfo(self, ts='', index=0):
print('%s%s size= 0x%x offset= 0x%x address= 0x%x name= 0x%x' % (
ts, blue("PHRegionEntry:"),
self.size, self.offset, self.address, self.name_offset
))
self.obj.showinfo('%s ' % (ts), index)
def dump(self, parent='', index=None):
if self.obj is not None:
self.obj.dump(parent, index=index)
class PFHeader(FirmwareObject, BaseObject):
'''A PFHeader: gh://skochinsky/181e6e338d90bb7f2693098dc43c6d54
struct PFRegionEntry
{
UINT32 FileOffset;
UINT32 Size;
UINT64 FlashAddress;
UINT32 NameOffset; //absolute offset
};
struct PFHeader
{
/* 00 */char Signature[4]; //$PFH
/* 04 */UINT32 dwVersion;
/* 08 */UINT32 dwHeaderSize;
/* 0C */UINT16 wHeaderChecksum;
/* 0E */UINT32 dwTotalImageSize;
/* 12 */UINT16 wTotalImageChecksum;
/* 04 */UINT32 dwNumberOfImages;
/* 18 */UINT32 imagetableOffset;
/* 1C */UINT32 unknown[48];
/* DC */PFImageEntry rgtable[1];
};
'''
def __init__(self, data):
self.valid_header = False
self.data = data
self.size = len(data)
if self.size < 32:
return
if self.data[:4] == b'$PFH':
self.valid_header = True
version, hdr_size, checksum, image_size, image_checksum, image_count, \
image_offset = struct.unpack('<IIHIHII', self.data[4:28])
self.hdr_size = hdr_size
self.image_count = image_count
self.image_offset = image_offset
@property
def objects(self):
return self.objs
def process(self):
if not self.valid_header:
return False
self.objs = []
data_offset = self.hdr_size
for i in range(self.image_count):
region_offset = 0xDC + (i * 20)
entry = PFRegion(self.data[region_offset:region_offset + 20],
self.data[data_offset:])
data_offset += entry.size
if entry.process():
self.objs.append(entry)
return True
def showinfo(self, ts='', index=None):
print('%s%s header size 0x%x images= %d offset= %s' % (
ts, blue("PFHeader:"),
self.hdr_size, self.image_count, self.image_offset
))
for i in range(len(self.objs)):
self.objs[i].showinfo('%s ' % (ts), index=i)
pass
def dump(self, parent='', index=None):
path = os.path.join(parent, "pfheader.pfh")
dump_data("%s" % path, self.data)
images = os.path.join(parent, "pfheader")
for i in range(len(self.objs)):
self.objs[i].dump(images, i)
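# Usage sketch (hypothetical: `data` holds the raw bytes of a Dell "$PFH"
# payload read from disk):
#
#     hdr = PFHeader(data)
#     if hdr.valid_header and hdr.process():
#         hdr.showinfo()
#         hdr.dump(parent='output_dir')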
class PFSPartitionedSection(FirmwareObject, BaseObject):
    '''A PFSSection with embedded PFSFiles (with additional sections) that
    split the content of the section across multiple chunks. The chunks and
    the encompassing embedded PFSFile together form a 'partitioned' section.
    Parsing proceeds as within a PFSFile, similar to handling sets of
    PFSSections, but without recording information/objects for each section.
    Instead, all headers and metadata are stripped, keeping only the defined
    body; the object then represents the single, concatenated body.
'''
HEADER_SIZE = 0x48
DATA_OFFSET = 0x248
def __init__(self, data):
self.data = data
self.size = len(data)
self.section_objects = []
self.partitions = 0
self.section_data = b""
def process(self):
# The end removes the PFS trailer.
body_end = self.size - 0x10
# This data is the content of a section, with stripped PFS header.
# The first 16 bytes are the UUID.
self.uuid = self.data[0x0:0x10]
body_step = 0x10
# The stepping is equivalent to a PFSSection save for a 0x200-sized
# set of variables.
while body_step < body_end:
# The UUID for a partitioned section is not meaningful.
header = self.data[body_step:body_step + self.HEADER_SIZE]
if len(header) < self.HEADER_SIZE:
return False
self.partitions += 1
size = struct.unpack("<I", header[0x28:0x28 + 0x04])[0]
# Advance the seek pointer past the header.
body_step += self.HEADER_SIZE
# The section data seeks past an offset of variables.
data = self.data[body_step + self.DATA_OFFSET:body_step + size]
self.section_data += data
sig1_size, trp_size, sig2_size = struct.unpack("<III", header[0x2C:0x2C + 0x0C])
body_step += size + sig1_size + trp_size + sig2_size
# Now that section partitions are reconstructed, search for volumes.
volumes = _discover_volumes(self.section_data)
if volumes is False:
return False
self.section_objects = volumes
return True
@property
def objects(self):
return self.section_objects
def showinfo(self, ts='', index=None):
print("%s%s %s partitions %d size 0x%x (%d bytes)" % (
ts, blue("Dell PFSPartitionedSection:"), green(sguid(self.uuid)),
self.partitions, len(self.section_data), len(self.section_data)))
for sub_object in self.section_objects:
sub_object.showinfo("%s " % ts)
def dump(self, parent="", index=None):
path = os.path.join(parent, "%s" % sguid(self.uuid))
dump_data("%s.data" % path, self.section_data)
path = os.path.join(parent, "section-%s" % sguid(self.uuid))
for sub_object in self.section_objects:
sub_object.dump(path)
pass
class PFSSection(FirmwareObject, BaseObject):
HEADER_SIZE = 0x48
def __init__(self, data):
self.data = data
self.size = -1
# Store parsed objects (if any)
self.section_objects = []
def process(self):
hdr = self.data[:self.HEADER_SIZE]
self.uuid = hdr[:0x10]
self.header = hdr
# Spec seems to be a consistent 1; what I thought was a timestamp is not.
# Version is static except for the first section in a PFS
##spec, ts, ctype, version, _u1 = struct.unpack("<I4shh4s", hdr[0x10:0x20])
spec, version_type = struct.unpack("<I4s", hdr[0x10:0x10 + 0x08])
self.spec = spec
self.version = ""
self.type = 0
self.ts = 0
for i in range(4):
group_offset = 0x18 + (i * 2)
if version_type[i:i+1] == b'A':
self.version += "%X" % struct.unpack("<h", hdr[group_offset:group_offset + 2])
elif version_type[i:i+1] == b'N':
self.version += ".%d" % struct.unpack("<h", hdr[group_offset:group_offset + 2])
# U1, U2 might be flag containers
_u2, section_size, rsa1_size, pmim_size, rsa2_size = struct.unpack(
"<8sIIII", hdr[0x20:0x20 + 0x18])
# This seems to be a set of 8-byte CRCs for each chunk (4 total)
self.crcs = hdr[0x20 + 0x18:self.HEADER_SIZE]
self.section_data = self.data[
self.HEADER_SIZE:self.HEADER_SIZE + section_size]
rsa1_offset = self.HEADER_SIZE + section_size
self.rsa1 = RawObject(self.data[rsa1_offset:rsa1_offset + rsa1_size])
pmim_offset = rsa1_offset + rsa1_size
self.pmim = RawObject(self.data[pmim_offset:pmim_offset + pmim_size])
rsa2_offset = pmim_offset + pmim_size
self.rsa2 = RawObject(self.data[rsa2_offset:rsa2_offset + rsa2_size])
# Unknown 8-byte variable
# _u3 = self.data[64+total_chunk_size:64+total_chunk_size+8]
self.unknowns = [_u2]
# Size of header, data, and footer
total_size = section_size + rsa1_size + pmim_size + rsa2_size
self.section_size = self.HEADER_SIZE + total_size
self.data = None
if self.section_data[:0x08] == b"PFS.HDR.":
# Partitioned ROM
rom = PFSPartitionedSection(self.section_data)
if not rom.process():
return False
self.section_objects.append(rom)
elif sguid(self.uuid) == PFS_GUIDS["FIRMWARE_VOLUMES"]:
volumes = _discover_volumes(self.section_data)
if volumes is False:
return False
self.section_objects += volumes
else:
    raw = AutoRawObject(self.section_data)
    raw.process()
    self.section_objects.append(raw)
return True
@property
def objects(self):
return self.section_objects + [self.rsa1, self.pmim, self.rsa2]
def info(self, include_content=False):
return {
"_self": self,
"guid": sguid(self.uuid),
"type": "PFSSection",
"content": self.section_data if include_content else "",
"attrs": {
"size": self.section_size,
"crcs": self.crcs,
"unknowns": [u for u in self.unknowns],
"version": self.version
},
"chunks": [self.rsa1, self.pmim, self.rsa2] if include_content else []
}
pass
def build(self, generate_checksum=False, debug=False):
body = ""
for sub_object in self.section_objects:
body += sub_object.build(generate_checksum, debug=debug)
return self.header + body + \
self.rsa1.build(generate_checksum) + \
self.pmim.build(generate_checksum) + \
self.rsa2.build(generate_checksum)
pass
def showinfo(self, ts='', index=None):
print("%s%s %s spec 0x%02x ts 0x%02x type 0x%02x version %s size 0x%x (%d bytes)" % (
ts, blue("Dell PFSSection:"), green(sguid(self.uuid)),
self.spec, self.ts, self.type, self.version,
self.section_size, self.section_size
))
for sub_object in self.section_objects:
sub_object.showinfo("%s " % ts)
pass
def dump(self, parent="", index=None):
path = os.path.join(parent, "%s" % sguid(self.uuid))
dump_data("%s.data" % path, self.section_data)
# Instead of calling dump on each chunk RawObject, dump with a better
# name.
if len(self.rsa1.data) > 0:
dump_data("%s.rsa1" % path, self.rsa1.data)
if len(self.pmim.data) > 0:
dump_data("%s.pmim" % path, self.pmim.data)
if len(self.rsa2.data) > 0:
dump_data("%s.rsa2" % path, self.rsa2.data)
path = os.path.join(parent, "section-%s" % sguid(self.uuid))
for sub_object in self.section_objects:
sub_object.dump(path)
pass
class PFSFile(FirmwareObject):
PFS_HEADER = b"PFS.HDR."
PFS_FOOTER = b"PFS.FTR."
def __init__(self, data):
self.sections = []
self.data = data
self.valid_header = False
if self.check_header():
self.valid_header = True
def check_header(self):
if len(self.data) < 32:
print_error("Data does not contain a header.")
return False
header = self.data[:0x10]
magic, spec, size = struct.unpack("<8sII", header)
self.spec = spec
self.size = size
if magic != self.PFS_HEADER:
print_error(
"Data does not contain the header magic (%s)." % self.PFS_HEADER)
return False
footer_offset = self.size + 0x10
footer = self.data[footer_offset:footer_offset + 0x10]
# Footer size is a repeated body size.
footer_size, _u2, footer_magic = struct.unpack("<II8s", footer)
if footer_magic != self.PFS_FOOTER:
print_error(
"Data does not container the footer magic (%s)." % self.PFS_FOOTER)
return False
return True
def process(self):
'''Chunks are assumed to contain a chunk header.'''
data = self.data[16:-16]
if not self.valid_header:
return False
chunk_num = 0
offset = 16
while True:
section = PFSSection(data)
if not section.process():
    return False
self.sections.append(section)
chunk_num += 1
offset += section.section_size
data = data[section.section_size:]
if len(data) < 64:
break
return True
exclude: any
A value to exclude from the filtered pks to save to the temporary key
args: list
Any other argument to be passed by a subclass will be passed as additional
args to the script.
"""
self.model.database.call_script(
# be sure to use the script dict at the class level
# to avoid registering it many times
script_dict=self.__class__.lua_filter_script,
keys=[key, tmp_key],
args=[key_type, start, end, exclude or ""] + list(args) # None is refused by redis-py so we pass ""
)
def get_filtered_keys(self, suffix, *args, **kwargs):
"""Returns the index key for the given args "value" (`args`)
For the parameters, see ``BaseIndex.get_filtered_keys``
Notes
-----
The process of reading from the sorted-set, extracting the primary keys, excluding some
values if needed, and putting the primary keys in a set or zset, is done in lua at the
redis level.
"""
accepted_key_types = kwargs.get('accepted_key_types')
self._check_key_accepted_key_types(accepted_key_types)
key_type = 'set' if not accepted_key_types or 'set' in accepted_key_types else 'zset'
tmp_key = self._unique_key('tmp')
args = list(args)
# special "in" case: we get n keys and make an unionstore with them then return this key
if suffix == 'in':
values = set(args.pop())
if not values:
return [] # no keys
in_keys = [
self.get_filtered_keys('eq', *(args+[value]), **kwargs)[0][0]
for value in values
]
if key_type == 'set':
self.connection.sunionstore(tmp_key, in_keys)
else:
self.connection.zunionstore(tmp_key, in_keys)
# we can delete the temporary keys
for in_key in in_keys:
self.connection.delete(in_key)
return [(tmp_key, key_type, True)]
key = self.get_storage_key(*args)
value = self.normalize_value(args[-1], transform=False)
real_suffix = self.remove_prefix(suffix)
start, end, exclude = self.get_boundaries(real_suffix, value)
self.call_script(key, tmp_key, key_type, start, end, exclude)
return [(tmp_key, key_type, True)]
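# Example flow for the special "in" suffix (hypothetical values): a call like
# get_filtered_keys('in', ['foo', 'bar'], accepted_key_types={'set'}) builds
# one 'eq' temporary key per value, unions them (sunionstore/zunionstore)
# into a fresh tmp_key, deletes the per-value keys, and returns
# [(tmp_key, 'set', True)].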
def get_pks_for_filter(self, key, filter_type, value):
"""Extract the pks from the zset key for the given type and value
This is used for the uniqueness check
Parameters
----------
key: str
The key of the redis sorted-set to use
filter_type: str
One of ``self.handled_suffixes``
value:
The normalized value for which we want the pks
Returns
-------
list
The list of instances PKs extracted from the sorted set
"""
raise NotImplementedError
class TextRangeIndex(BaseRangeIndex):
"""Index allowing to filter on something greater/less than a value
We use the zrangebylex redis command that was created for this very purpose
See Also
---------
https://redis.io/topics/indexes#lexicographical-indexes
"""
handled_suffixes = {None, 'eq', 'gt', 'gte', 'lt', 'lte', 'startswith', 'in'}
key = 'text-range'
separator = u':%s-SEPARATOR:' % key.upper()
lua_filter_script = {
# we extract members of the sorted-set via zrangebylex
# then we split the value and pk, on the separator
# if the value is the one in exclude, we ignore it
# and we add every pk to a set or zset depending on the asked type
# if a zset, we use the returned position as a score for each member
# we do this in blocks of 100 to avoid storing too many temporary things
# in memory
'lua': """
local source_key, dest_type, dest_key = KEYS[1], ARGV[1], KEYS[2]
local lex_start, lex_end = ARGV[2], ARGV[3]
local exclude, separator = ARGV[4], ARGV[5]
local start, block_size = 0, 100
while true do
local members = redis.call('zrangebylex', source_key, lex_start, lex_end, 'limit', start, block_size)
if members[1] == nil then -- nothing returned, we are done
break
end
local result, nb_results = {}, 0;
for i, member in ipairs(members) do
-- split to get value and pk (do it reverse to split on the last separator only)
local first_pos, last_pos = member:reverse():find(separator:reverse(), 1, true)
first_pos = member:len() - last_pos -- real position of last separator
-- only add if nothing to exclude, or the rest is not the exclude
if not exclude or member:sub(1, first_pos) ~= exclude then
nb_results = nb_results + 1
result[nb_results] = member:sub(first_pos + separator:len() + 1)
end
end
-- call sadd/zadd only if we have something to put in
if nb_results > 0 then -- sadly, no "continue" in lua :(
if dest_type == 'set' then
redis.call('sadd', dest_key, unpack(result))
else
-- zadd expects args this way: score member score member ...
local args = {}
for i, member in ipairs(result) do
args[2*i-1], args[2*i] = i-1, member
end
redis.call('zadd', dest_key, unpack(args))
end
end
-- if we got less than the max, it means we are done
if members[block_size] == nil then
break
end
-- loop again for the next block
start = start + block_size
end
-- return the key, because why not
return dest_key
"""
}
def prepare_data_to_store(self, pk, value, **kwargs):
"""Prepare the value to be stored in the zset
For the parameters, see BaseRangeIndex.prepare_data_to_store
We add a string "value:pk" to the storage sorted-set, with a score of 0.
Then when filtering we'll get them lexicographically ordered,
and we'll later be able to extract the pk from each returned value
"""
value = self.normalize_value(value)
return self.separator.join([value, str(pk)]), 0
def _extract_value_from_storage(self, string):
"""Taking a string that was a member of the zset, extract the value and pk
Parameters
----------
string: str
The member extracted from the sorted set
Returns
-------
tuple
Tuple with the value and the pk, extracted from the string
"""
parts = string.split(self.separator)
pk = parts.pop()
return self.separator.join(parts), pk
def get_boundaries(self, filter_type, value):
"""Compute the boundaries to pass to zrangebylex depending of the filter type
The third return value, ``exclude`` is ``None`` except for the filters
`lt` and `gt` because we cannot explicitly exclude it when
querying the sorted-set
For the parameters, see BaseRangeIndex.store
Notes
-----
For zrangebylex:
- `(` means "not included"
- `[` means "included"
- `\xff` is the last char; it lets us express "starting with"
- `-` alone means "from the very beginning"
- `+` alone means "to the very end"
"""
assert filter_type in self.handled_suffixes
start = '-'
end = '+'
exclude = None
if filter_type in (None, 'eq'):
# we include the separator to only get the members with the exact value
start = u'[%s%s' % (value, self.separator)
end = start.encode('utf-8') + b'\xff'
elif filter_type == 'gt':
# starting at the value, excluded
start = u'(%s' % value
exclude = value
elif filter_type == 'gte':
# starting at the value, included
start = u'[%s' % value
elif filter_type == 'lt':
# ending with the value, excluded
end = u'(%s' % value
exclude = value
elif filter_type == 'lte':
# ending with the value, included (but not starting with, hence the separator)
end = u'[%s%s' % (value, self.separator)
end = end.encode('utf-8') + b'\xff'
elif filter_type == 'startswith':
# using `\xff` to simulate "startswith"
start = u'[%s' % value
end = start.encode('utf-8') + b'\xff'
return start, end, exclude
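# Worked example (hypothetical value 'foo'; SEP being self.separator):
#   eq         -> start '[fooSEP',  end b'[fooSEP\xff', exclude None
#   gt         -> start '(foo',     end '+',            exclude 'foo'
#   startswith -> start '[foo',     end b'[foo\xff',    exclude None
# so for 'startswith' every member beginning with 'foo' falls in range.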
def get_pks_for_filter(self, key, filter_type, value):
"""Extract the pks from the zset key for the given type and value
For the parameters, see BaseRangeIndex.get_pks_for_filter
"""
start, end, exclude = self.get_boundaries(filter_type, value)
members = self.connection.zrangebylex(key, start, end)
if exclude is not None:
# special case where we don't want the exact given value, but we cannot
# exclude it from the sorted set directly
return [
member_pk
for member_value, member_pk in
[self._extract_value_from_storage(member) for member in members]
if member_value != exclude
]
else:
return [self._extract_value_from_storage(member)[-1] for member in members]
def call_script(self, key, tmp_key, key_type, start, end, exclude, *args):
"""Call the lua scripts with given keys and args
We add the separator to the arguments to be passed to the script
For the parameters, see BaseRangeIndex.call_script
"""
args = list(args)
args.append(self.separator)
super(TextRangeIndex, self).call_script(
key, tmp_key, key_type, start, end, exclude, *args
)
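
# A minimal round-trip sketch (hypothetical index/value/pk, no redis required):
# members are stored as "value<separator>pk" with a score of 0, and the split
# happens on the *last* separator so values containing the separator still
# round-trip correctly.
def _example_member_roundtrip(index, value, pk):
    member, score = index.prepare_data_to_store(pk, value)  # score is always 0
    stored_value, stored_pk = index._extract_value_from_storage(member)
    return stored_value, stored_pk  # stored_pk == str(pk)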
class NumberRangeIndex(BaseRangeIndex):
handled_suffixes = {None, 'eq', 'gt', 'gte', 'lt', 'lte', 'in'}
key = 'number-range'
raise_if_not_float = False
lua_filter_script = {
# we extract members of the sorted-set via zrangebyscore
# and we add every pk to a set or zset depending on the asked type
# if a zset, we use the returned position as a score for each member
# we do this in blocks of 100 to avoid storing too many temporary things
# in memory
'lua': """
local source_key, dest_type, dest_key = KEYS[1], ARGV[1], KEYS[2]
local score_start, score_end = ARGV[2], ARGV[3]
local start, block_size = 0, 100
<filename>Welcomer 6.20/modules/core.py<gh_stars>10-100
import asyncio
import copy
import csv
import io
import math
from math import inf
import os
import sys
import time
import traceback
import logging
from importlib import reload
from datetime import datetime
import aiohttp
import discord
import requests
import json
import ujson
from discord.ext import commands
from rockutils import rockutils
import uuid
import handling
def canint(val):
try:
int(val)
return True
except BaseException:
return False
class NoPermission(Exception):
pass
class NoDonator(Exception):
pass
class WelcomerCore(commands.Cog):
def __init__(self, bot):
self.bot = bot
def maketimestamp(
self,
timestamp=0,
lang=[
"second",
"minute",
"hour",
"day",
"and",
"ago",
"year"],
allow_secs=False,
include_ago=True):
if not timestamp:
timestamp = 0
_y, _d, _h, _m, _s = rockutils.parse_unix(
datetime.utcnow().timestamp() - timestamp)
# message = ""
# if _y > 0:
# message += f"{str(_y)} {lang[6]}{'s' if _y > 1 else ''} "
# if _d > 0:
# if _h < 0:
# message += f"{lang[4]} "
# elif len(message) > 1:
# message += ", "
# message += f"{str(_d)} {lang[3]}{'s' if _d > 1 else ''} "
# if _h > 0:
# if _m < 0:
# message += f"{lang[4]} "
# elif len(message) > 1:
# message += ", "
# message += f"{str(_h)} {lang[2]}{'s' if _h > 1 else ''} "
# # if we dont allow seconds, round the minutes up
# if not allow_secs and _s > 0:
# _m += 1
# if _m > 0:
# if _h > 0 or _d > 0:
# message += f"{lang[4]} "
# message += f"{str(_m)} {lang[1]}{'s' if _m > 1 else ''} "
# if allow_secs:
# if _h > 0 or _d > 0 or _m > 0:
# message += f"{lang[4]} "
# message += f"{str(_s)} {lang[0]}{'s' if _s > 1 else ''} "
# if include_ago:
# message += lang[5]
# return message
message = ""
if _y > 0:
message += f"{_y} year{'s' if _y != 1 else ''}"
if _d > 0:
if _h < 0:
message += " and "
elif len(message) > 1:
message += ", "
message += f"{_d} day{'s' if _d != 1 else ''}"
if _h > 0:
if _m < 0:
message += " and "
elif len(message) > 1:
message += ", "
message += f"{_h} hour{'s' if _h != 1 else ''}"
if _m > 0:
if _s < 0 if allow_secs else (_h > 0 or _d > 0):
message += " and "
elif len(message) > 1:
message += ", "
message += f"{_m} minute{'s' if _m != 1 else ''}"
if allow_secs:
if _h > 0 or _d > 0 or _m > 0:
message += " and "
message += f"{_s} second{'s' if _s != 1 else ''}"
if include_ago:
message += " ago"
return message
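# Example (hypothetical input): for a timestamp 3725 seconds in the past,
# maketimestamp(datetime.utcnow().timestamp() - 3725) yields
# "1 hour and 2 minutes ago"; seconds are only shown with allow_secs=True.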
async def get_value(self, table, key, default=None):
# print("FETCH", table, key)
async with self.bot.connection.acquire() as connection:
value = await connection.fetchrow(
f"SELECT * FROM {table} WHERE id = $1",
key
)
if value:
print("FETCH", table, key, "OK")
try:
return ujson.loads(value["value"])
except ValueError:
return json.loads(value["value"])
else:
print("FETCH", table, key, "FAIL")
return default
async def set_value(self, table, key, value):
if key is None:
key = str(<KEY>())
print("SET", table, key)
try:
async with self.bot.connection.acquire() as connection:
await connection.execute(
f"INSERT INTO {table}(id, value) VALUES($1, $2) ON CONFLICT (id) DO UPDATE SET value = $2",
key, ujson.dumps(value)
)
except Exception as e:
print("Failed to set value", table, ":", key, e)
# return False
else:
# return True
return {
"generated_keys": [key],
"inserted": 1
}
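# The statement above is a plain PostgreSQL upsert: INSERT the (id, value)
# row, and ON CONFLICT on id overwrite the existing value instead, so
# calling set_value("guilds", "123", data) twice leaves one row for "123".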
async def get_guild_info(self, id, refer="", reload_data=True, create_cache=True, direct=False, request_invites=True):
# rockutils.prefix_print(
# f"{f'[Refer: {refer}] ' if refer != '' else ''}Getting information for G:{id}",
# prefix="Guild Info:Get",
# prefix_colour="light green")
guild_info = await self.get_value("guilds", str(id))
# guild_info = await r.table("guilds").get(str(id)).run(self.bot.connection)
if not direct:
new_data = True if not isinstance(
guild_info, dict) else not bool(guild_info)
has_updated = new_data
guild = self.bot.get_guild(int(id))
_guild_info = self.bot.serialiser.guild(guild)
_time = time.time()
default_data = copy.deepcopy(self.bot.default_guild)
latest_version = default_data['d']['dv']
if new_data and guild:
# try:
# old_info = await r.db("welcomer5").table("guilds").get(str(id)).run(self.bot.connection)
# if old_info:
# default_data['a']['e'] = old_info['analytics']['enabled']
# default_data['ar']['e'] = old_info['autorole']['enabled']
# default_data['ar']['r'] = list(
# map(str, old_info['autorole']['role_ids']))
# for donation in old_info['donations']:
# default_data['d']['de'].append(donation['id'])
# default_data['d']['b']['hb'] = True
# default_data['l']['e'] = old_info['leaver']['enabled']
# if isinstance(old_info['leaver']['channel'], str):
# default_data['l']['c'] = old_info['leaver']['channel']
# default_data['l']['t'] = old_info['leaver']['text']
# if "prefix" in old_info:
# default_data['d']['b']['p'] = old_info['prefix']
# default_data['r']['e'] = old_info['rules']['enabled']
# default_data['r']['r'] = old_info['rules']['rules']
# default_data['d']['b']['ai'] = old_info['settings']['allow_invite']
# default_data['d']['b']['d'] = old_info['settings']['description']
# default_data['d']['b']['ss'] = old_info['settings']['show_staff']
# default_data['st']['ap'] = old_info['staff']['allow_ping']
# for staff_id, allow_ping in old_info['staff']['staff_ids'].items():
# default_data['st']['u'].append(
# [staff_id, allow_ping])
# # for channel_id, stat in old_info['stats']['channels']:
# # stats = {}
# # stats['c'] = channel_id
# # stats['t'] = stat['type']
# # stats['t'] = stat['text']
# # default_data['s']['c'].append(stat)
# default_data['s']['c'] = old_info['stats']['channels']
# if isinstance(old_info['stats']['enabled'], str):
# default_data['s']['e'] = old_info['stats']['enabled']
# default_data['s']['ca'] = old_info['stats']['category']
# default_data['tc']['e'] = old_info['tempchannels']['enabled']
# if isinstance(old_info['tempchannels']['category'], str):
# default_data['tc']['c'] = old_info['tempchannels']['category']
# default_data['tc']['ap'] = old_info['tempchannels']['autopurge']
# if isinstance(old_info['welcomer']['channel'], str):
# default_data['w']['c'] = old_info['welcomer']['channel']
# default_data['w']['e'] = old_info['welcomer']['enable_embed']
# default_data['w']['b'] = old_info['welcomer']['text']['badges']
# default_data['w']['iv'] = old_info['welcomer']['text']['invited']
# default_data['w']['i']['e'] = old_info['welcomer']['images']['enabled']
# default_data['w']['i']['bg'] = old_info['welcomer']['images']['background']
# # default_data['w']['i']['c']['bo'] = old_info['welcomer']['images']['colour']['border']
# # default_data['w']['i']['c']['b'] = old_info['welcomer']['images']['colour']['text']
# # default_data['w']['i']['c']['pb'] = old_info['welcomer']['images']['colour']['profile']
# default_data['w']['i']['m'] = old_info['welcomer']['images']['message']
# default_data['w']['t']['e'] = old_info['welcomer']['text']['enabled']
# default_data['w']['t']['m'] = old_info['welcomer']['text']['message']
# default_data['w']['dm']['e'] = old_info['welcomer']['dm']['enabled']
# default_data['w']['dm']['m'] = old_info['welcomer']['text']['message']
# if "namepurge" in old_info['welcomer']:
# default_data['np']['e'] = old_info['welcomer']['namepurge']['enabled']
# default_data['np']['f'] = list(map(lambda o: o.replace(
# "\n", ""), old_info['welcomer']['namepurge']['filter']))
# except BaseException:
# exc_info = sys.exc_info()
# traceback.print_exception(*exc_info)
guild_info = default_data
original_guild_info = copy.deepcopy(guild_info)
guild_info['d']['b']['c'] = self.bot.cluster_id
guild_info['id'] = str(id)
if self.bot.donator:
guild_info['d']['b']['hd'] = True
elif guild:
if not guild.get_member(498519480985583636):
guild_info['d']['b']['hd'] = False
if guild:
if new_data:
guild_info['d']['g']['ga'] = math.ceil(_time)
guild_info['d']['g']['gc'] = math.ceil(
guild.created_at.timestamp())
if request_invites:
try:
guild_info['d']['i'] = await self.bot.serialiser.invites(guild)
except BaseException:
pass
guild_info['d']['g']['i'] = _guild_info['icons']
guild_info['d']['g']['ic'] = _guild_info['icon']
guild_info['d']['g']['n'] = _guild_info['name']
guild_info['d']['b']['r'] = _guild_info['region']
guild_info['d']['b']['sh'] = guild.shard_id
if guild.owner or guild.owner_id:
try:
owner_id = guild.owner.id
except:
owner_id = guild.owner_id
user = self.bot.get_user(owner_id)
if user:
guild_info['d']['g']['o'] = self.bot.serialiser.user(
user)
if _time - guild_info['d']['m']['u'] > 600:
guild_info['d']['m'] = {
"b": _guild_info['bots'],
"m": _guild_info['users'] - _guild_info['bots'],
"a": _guild_info['users'],
"u": _time
}
# if _time - guild_info['d']['d']['u'] > 600:
# _guild_detailed = self.bot.serialiser.guild_detailed(
# guild)
# guild_info['d']['d'] = {
# "s": _guild_detailed['streaming'],
# "o": _guild_detailed['online'],
# "i": _guild_detailed['idle'],
# "d": _guild_detailed['dnd'],
# "of": _guild_detailed['offline'],
# "u": _time
# }
if _time - guild_info['d']['c']['u'] > 600:
_channels = self.bot.serialiser.channels(guild)
guild_info['d']['c'] = {
"c": _channels['categories'],
"v": _channels['voice'],
"t": _channels['text'],
"u": _time
}
if "r" not in guild_info['d'] or (
_time - guild_info['d']['r']['u'] > 600):
_roles = self.bot.serialiser.roles(guild)
guild_info['d']['r'] = {
"r": _roles,
"u": _time
}
has_updated = True if guild_info != original_guild_info else has_updated
if latest_version != guild_info['d']['dv']:
default_data.update(guild_info)
guild_info = default_data
_version = guild_info['d']['dv']
if _version == 0:
# example hardcoded data overwrite
pass
if "sw" not in guild_info['d']['b']:
guild_info['d']['b']['sw'] = True
guild_info['d']['dv'] = default_data['d']['dv']
has_updated = True
if not isinstance(guild_info['s']['c'], list):
print("Emptying channel list")
guild_info['s']['c'] = []
def normalize_colour(string):
if string.startswith("RGBA|"):
return string
elif string.startswith("RGB|"):
return string
else:
try:
_hex = str(hex(int(string)))[2:]
if len(_hex) >= 8:
return f"RGBA|{str(hex(string))[:8]}"
elif len(_hex) >= 6:
return f"RGB|{str(hex(string))[:6]}"
except BaseException:
pass
return f"RGB|FFFFFF"
keys = ['w.i.c.b', 'w.i.c.b', '<KEY>', 'w.i.c.ib']
for key in keys:
value = rockutils.getvalue(key, guild_info)
value = str(value)
if not value.startswith("R"):
newvalue = normalize_colour(value)
rockutils.setvalue(key, guild_info, newvalue)
# print("create cache", create_cache)
if create_cache:
guild = self.bot.get_guild(int(id))
if guild:
await self.create_guild_cache(guild_info, guild, force=True)
else:
rockutils.prefix_print(
f"Wanted to make cache for {id} but no guild object", prefix="createcache", prefix_colour="red", text_colour="light red")
create_cache = False
if has_updated or new_data:
if new_data:
# rockutils.prefix_print(
# f"{f'[Refer: {refer}] ' if refer != '' else ''}Creating information for G:{id}",
# prefix="Guild Info:Get",
# prefix_colour="light green")
# await r.table("guilds").insert(guild_info).run(self.bot.connection)
await self.set_value("guilds", guild_info["id"], guild_info)
else:
await self.update_guild_info(id, guild_info, refer="getguildinfo:" + (refer or "?"))
# print("create cache", create_cache)
if create_cache:
guild = self.bot.get_guild(int(id))
if guild:
await self.create_guild_cache(guild_info, guild, force=True)
else:
rockutils.prefix_print(
f"Wanted to make cache for {id} but no guild object", prefix="createcache", | |
<reponame>frochet/torps
### Classes implementing "network modification" interface, i.e. modify_network_state() ###
from stem import Flag
from stem.exit_policy import ExitPolicy
import pathsim
class Enum(tuple): __getattr__ = tuple.index
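# Example: errs = Enum(("NO_ERROR", "SUMG_ERROR")) gives errs.NO_ERROR == 0 and
# errs.SUMG_ERROR == 1 (attribute access resolves via tuple.index), while
# errs[1] still works for the reverse lookup "SUMG_ERROR".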
### Class inserting adversary relays ###
class AdversaryInsertion(object):
def add_adv_guards(self, num_adv_guards, bandwidth):
""""Adds adv guards into self.add_relays and self.add_descriptors."""
#, adv_relays, adv_descriptors
for i in xrange(num_adv_guards):
# create consensus
num_str = str(i+1)
fingerprint = '0' * (40-len(num_str)) + num_str
nickname = 'BadGuyGuard' + num_str
flags = [Flag.FAST, Flag.GUARD, Flag.RUNNING, Flag.STABLE,
Flag.VALID]
self.adv_relays[fingerprint] = pathsim.RouterStatusEntry(fingerprint,
nickname, flags, bandwidth)
# create descriptor
hibernating = False
family = {}
address = '10.'+num_str+'.0.0' # avoid /16 conflicts
exit_policy = ExitPolicy('reject *:*')
ntor_onion_key = num_str # indicate ntor support w/ val != None
self.adv_descriptors[fingerprint] = pathsim.ServerDescriptor(fingerprint,
hibernating, nickname, family, address, exit_policy,
ntor_onion_key)
def add_adv_exits(self, num_adv_guards, num_adv_exits, bandwidth):
""""Adds adv exits into self.add_relays and self.add_descriptors."""
for i in xrange(num_adv_exits):
# create consensus
num_str = str(i+1)
fingerprint = 'F' * (40-len(num_str)) + num_str
nickname = 'BadGuyExit' + num_str
flags = [Flag.FAST, Flag.EXIT, Flag.RUNNING, Flag.STABLE,
Flag.VALID]
self.adv_relays[fingerprint] = pathsim.RouterStatusEntry(fingerprint,
nickname, flags, bandwidth)
# create descriptor
hibernating = False
family = {}
address = '10.'+str(num_adv_guards+i+1)+'.0.0' # avoid /16 conflicts
exit_policy = ExitPolicy('accept *:*')
ntor_onion_key = num_str # indicate ntor support w/ val != None
self.adv_descriptors[fingerprint] = pathsim.ServerDescriptor(fingerprint,
hibernating, nickname, family, address, exit_policy,
ntor_onion_key)
def compute_tot_bandwidths(self, cons_rel_stats, descriptors):
""" Compute
G the total bandwidth for Guard-flagged nodes
M the total bandwidth for non-flagged nodes
E the total bandwidth for Exit-flagged nodes
D the total bandwidth for Guard+Exit-flagged nodes
T = G+M+E+D
"""
def filter_flags(cons_rel_stats, descriptors, flags, no_flags):
nodes = []
for fprint in cons_rel_stats:
rel_stat = cons_rel_stats[fprint]
i = 0
j = 0
for flag in no_flags:
if flag in rel_stat.flags:
j+=1
for flag in flags:
if flag in rel_stat.flags:
i+=1
if i == len(flags) and j==0 and (fprint in descriptors\
or fprint in self.adv_descriptors):
nodes.append(fprint)
return nodes
guards = filter_flags(cons_rel_stats, descriptors,\
[Flag.RUNNING, Flag.VALID, Flag.GUARD], [Flag.EXIT])
exits = filter_flags(cons_rel_stats, descriptors,\
[Flag.RUNNING, Flag.VALID, Flag.EXIT], [Flag.GUARD])
middles = filter_flags(cons_rel_stats, descriptors,\
[Flag.RUNNING, Flag.VALID], [Flag.GUARD, Flag.EXIT])
guards_exits = filter_flags(cons_rel_stats, descriptors,\
[Flag.RUNNING, Flag.VALID, Flag.GUARD, Flag.EXIT], [])
G = M = E = D = T = 0
for fprint in guards:
G += cons_rel_stats[fprint].bandwidth
for fprint in middles:
M += cons_rel_stats[fprint].bandwidth
for fprint in exits:
E += cons_rel_stats[fprint].bandwidth
for fprint in guards_exits:
D += cons_rel_stats[fprint].bandwidth
T = G+M+E+D
return (int(G), int(M), int(E), int(D), int(T))
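# Worked example (hypothetical consensus): two pure guards with bandwidths
# 100 and 50, one pure exit with 80, one guard+exit with 70 and one
# unflagged relay with 40 yield (G, M, E, D, T) = (150, 40, 80, 70, 340).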
def check_weights_errors(self, Wgg, Wgd, Wmg, Wme, Wmd, Wee, Wed,
weightscale, G, M, E, D, T, margin, do_balance):
"""Verify that our weights satify the formulas from dir-spec.txt"""
def check_eq(a, b, margin):
return (a - b) <= margin if (a - b) >= 0 else (b - a) <= margin
def check_range(a, b, c, d, e, f, g, mx):
return (a >= 0 and a <= mx and b >= 0 and b <= mx and\
c >= 0 and c <= mx and d >= 0 and d <= mx and\
e >= 0 and e <= mx and f >= 0 and f <= mx and\
g >= 0 and g <= mx)
# Wed + Wmd + Wgd == weightscale
if (not check_eq(Wed+Wmd+Wgd, weightscale, margin)):
return self.bww_errors.SUMD_ERROR
# Wmg + Wgg == weightscale
if (not check_eq(Wmg+Wgg, weightscale, margin)):
return self.bww_errors.SUMG_ERROR
# Wme + Wee == weightscale
if (not check_eq(Wme+Wee, weightscale, margin)):
return self.bww_errors.SUME_ERROR
# Verify weights within range 0 -> weightscale
if (not check_range(Wgg, Wgd, Wmg, Wme, Wmd, Wed, Wee, weightscale)):
return self.bww_errors.RANGE_ERROR
if (do_balance):
#Wgg*G + Wgd*D == Wee*E + Wed*D
if (not check_eq(Wgg*G+Wgd*D, Wee*E+Wed*D, (margin*T)/3)):
return self.bww_errors.BALANCE_EG_ERROR
#Wgg*G+Wgd*D == M*weightscale + Wmd*D + Wme * E + Wmg*G
if (not check_eq(Wgg*G+Wgd*D, M*weightscale+Wmd*D+Wme*E+Wmg*G,\
(margin*T)/3)):
return self.bww_errors.BALANCE_MID_ERROR
return self.bww_errors.NO_ERROR
def __init__(self, args, testing):
self.adv_time = args.adv_time
self.adv_relays = {}
self.adv_descriptors = {}
self.add_adv_guards(args.num_adv_guards, args.adv_guard_cons_bw)
self.add_adv_exits(args.num_adv_guards, args.num_adv_exits,
args.adv_exit_cons_bw)
self.testing = testing
self.first_modification = True
self.bww_errors = Enum(("NO_ERROR","SUMG_ERROR", "SUME_ERROR",\
"SUMD_ERROR","BALANCE_MID_ERROR", "BALANCE_EG_ERROR",\
"RANGE_ERROR"))
def modify_network_state(self, network_state):
"""Adds adversarial guards and exits to cons_rel_stats and
descriptors dicts."""
# add adversarial descriptors to nsf descriptors
# only add once because descriptors variable is assumed persistant
if (self.first_modification == True):
network_state.descriptors.update(self.adv_descriptors)
self.first_modification = False
# if insertion time has been reached, add adversarial relays into
# consensus and hibernating status list
if (self.adv_time <= network_state.cons_valid_after):
# include additional relays in consensus
if self.testing:
print('Adding {0} relays to consensus.'.format(\
len(self.adv_relays)))
for fprint, relay in self.adv_relays.iteritems():
if fprint in network_state.cons_rel_stats:
raise ValueError(\
'Added relay exists in consensus: {0}:{1}'.\
format(relay.nickname, fprint))
network_state.cons_rel_stats[fprint] = relay
# include hibernating statuses for added relays
network_state.hibernating_statuses.extend([(0, fp, False) \
for fp in self.adv_relays])
# recompute bwweights taking into account the new nodes added
(casename, Wgg, Wgd, Wee, Wed, Wmg, Wme, Wmd) =\
self.recompute_bwweights(network_state)
bwweights = network_state.cons_bw_weights
if self.testing:
print("""New computation of bwweights, network load case
is {0} with weights Wgg={1}, Wgd={2}, Wee={3},
Wed={4}, Wmg={5}, Wme={6}, Wmd={7}.\n
The weights received from the consensus are Wgg=
{8}, Wgd={9}, Wee={10}, Wed={11}, Wmg={12}, Wme=
{13}, Wmd={14} """.format(casename, Wgg, Wgd, Wee,\
Wed, Wmg, Wme, Wmd, bwweights['Wgg'], bwweights['Wgd'],\
bwweights['Wee'], bwweights['Wed'], bwweights['Wmg'],\
bwweights['Wme'], bwweights['Wmd']))
bwweights['Wgg'] = Wgg
bwweights['Wgd'] = Wgd
bwweights['Wee'] = Wee
bwweights['Wed'] = Wed
bwweights['Wmg'] = Wmg
bwweights['Wme'] = Wme
bwweights['Wmd'] = Wmd
def recompute_bwweights(self, network_state):
"""Detects in which network case load we are according to section 3.8.3
of dir-spec.txt from Tor' specifications and recompute bandwidth weights
"""
(G, M, E, D, T) = self.compute_tot_bandwidths(network_state.cons_rel_stats,\
network_state.descriptors)
weightscale = network_state.cons_bwweightscale
if (3*E >= T and 3*G >= T):
#Case 1: Neither are scarce
casename = "Case 1 (Wgd=Wmd=Wed)"
Wgd = Wed = Wmd = weightscale/3
Wee = (weightscale*(E+G+M))/(3*E)
Wme = weightscale - Wee
Wmg = (weightscale*(2*G-E-M))/(3*G)
Wgg = weightscale - Wmg
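# Sanity check for Case 1 (S = weightscale): Wgg*G =
# (S - S*(2G-E-M)/(3G))*G = S*(G+E+M)/3, which equals
# Wee*E = S*(E+G+M)/3; the D terms cancel since Wgd == Wed, so
# guard and exit positions carry equal weighted bandwidth.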
check = self.check_weights_errors(Wgg, Wgd, Wmg, Wme, Wmd, Wee, Wed,\
weightscale, G, M, E, D, T, 10, True)
if (check):
raise ValueError(\
'ERROR: {0} Wgd={1}, Wed={2}, Wmd={3}, Wee={4},\
Wmg={5}, Wgg={6}'.format(self.bww_errors[check],\
Wgd, Wed, Wmd, Wee, Wmg, Wgg))
elif (3*E < T and 3*G < T):
#Case 2: Both Guards and Exits are scarce
#Balance D between E and G, depending upon D capacity and
#scarcity
R = min(E, G)
S = max(E, G)
if (R+D < S):
#subcase a
Wgg = Wee = weightscale
Wmg = Wme = Wmd = 0
if (E < G):
casename = "Case 2a (E scarce)"
Wed = weightscale
Wgd = 0
else:
# E >= G
casename = "Case 2a (G scarce)"
Wed = 0
Wgd = weightscale
else:
#subcase b R+D >= S
casename = "Case 2b1 (Wgg=weightscale, Wmd=Wgd)"
Wee = (weightscale*(E-G+M))/E
Wed = (weightscale*(D-2*E+4*G-2*M))/(3*D)
Wme = (weightscale*(G-M))/E
Wmg = 0
Wgg = weightscale
Wmd = Wgd = (weightscale-Wed)/2
check = self.check_weights_errors(Wgg, Wgd, Wmg, Wme, Wmd,\
Wee, Wed, weightscale, G, M, E, D, T, 10, True)
if (check):
casename = 'Case 2b2 (Wgg=1, Wee=1)'
Wgg = Wee = weightscale
Wed = (weightscale*(D-2*E+G+M))/(3*D)
Wmd = (weightscale*(D-2*M+G+E))/(3*D)
Wme = Wmg = 0
if (Wmd < 0):
#Too much bandwidth at middle position
casename = 'case 2b3 (Wmd=0)'
Wmd = 0
Wgd = weightscale - Wed - Wmd
check = self.check_weights_errors(Wgg, Wgd, Wmg, Wme, Wmd,\
Wee, Wed, weightscale, G, M, E, D, T, 10, True)
if (check != self.bww_errors.NO_ERROR and check !=\
self.bww_errors.BALANCE_MID_ERROR):
raise ValueError(\
'ERROR: {0} Wgd={1}, Wed={2}, Wmd={3}, Wee={4},\
Wmg={5}, Wgg={6}'.format(self.bww_errors[check],\
Wgd, Wed, Wmd, Wee, Wmg, Wgg))
else: # if (E < T/3 or G < T/3)
#Case 3: Guard or Exit is scarce
S = min(E, G)
if (not (3*E < T or 3*G < T) or not (3*G >= T or 3*E >= T)):
raise ValueError(\
'ERROR: Bandwidths have inconsistent values \
M={0}, E={1}, D={2}, T={3}'.format(M,E,D,T))
if (3*(S+D) < T):
#subcasea: S+D < T/3
if (G < E):
casename = 'Case 3a (G scarce)'
Wgg = Wgd = weightscale
Wmd = Wed = Wmg = 0
if (E < M): Wme = 0
else: Wme = (weightscale*(E-M))/(2*E)
Wee = weightscale - Wme
else:
# G >= E
casename = "Case 3a (E | |
# File: vxstream_connector.py
#
# Licensed under Apache 2.0 (https://www.apache.org/licenses/LICENSE-2.0.txt)
#
# Phantom imports
import phantom.app as phantom
from phantom.base_connector import BaseConnector
from phantom.action_result import ActionResult
try:
from phantom.vault import Vault
except:
import phantom.vault as Vault
from vxstream_consts import *
# Other imports used by this connector
import json
import requests
import uuid
import shutil
import os
from io import BytesIO
import gzip
import time
from datetime import datetime
import urllib
from urlparse import urlparse
from os.path import splitext, basename
from api_classes.api_key_current import ApiKeyCurrent
from api_classes.api_search_terms import ApiSearchTerms
from api_classes.api_search_hash import ApiSearchHash
from api_classes.api_submit_file import ApiSubmitFile
from api_classes.api_submit_online_file import ApiSubmitOnlineFile
from api_classes.api_submit_url_for_analysis import ApiSubmitUrlForAnalysis
from api_classes.api_report_summary import ApiReportSummary
from api_classes.api_report_file import ApiReportFile
from api_classes.api_report_state import ApiReportState
from api_classes.api_submit_hash_for_url import ApiSubmitHashForUrl
class VxError(Exception):
pass
class VxStreamConnector(BaseConnector):
ACTION_ID_TEST_ASSET_CONNECTIVITY = 'test_asset_connectivity'
ACTION_ID_DETONATE_URL = 'detonate_url'
ACTION_ID_DETONATE_FILE = 'detonate_file'
ACTION_ID_DETONATE_ONLINE_FILE = 'detonate_online_file'
ACTION_ID_GET_REPORT = 'get_report'
ACTION_ID_SEARCH_TERMS = 'search_terms'
ACTION_ID_HUNT_FILE = 'hunt_file'
ACTION_ID_HUNT_HASH = 'hunt_hash'
ACTION_ID_HUNT_IP = 'hunt_ip'
ACTION_ID_HUNT_URL = 'hunt_url'
ACTION_ID_HUNT_DOMAIN = 'hunt_domain'
ACTION_ID_HUNT_MALWARE_FAMILY = 'hunt_malware_family'
ACTION_ID_HUNT_SIMILAR = 'hunt_similar'
ACTION_ID_GET_FILE = 'get_file'
ACTION_ID_GET_PCAP = 'get_pcap'
ACTION_ID_GET_FILE_FROM_URL = 'get_file_from_url'
ACTION_ID_CHECK_STATUS = 'check_status'
ACTION_ID_CHECK_URL_HASH = 'check_url_hash'
_base_url = ''
_request_session = None
def __init__(self):
super(VxStreamConnector, self).__init__()
self._api_token = None
def initialize(self):
config = self.get_config()
self._base_url = config[PAYLOAD_SECURITY_WEBSERVICE_BASE_URL]
if self._base_url.endswith('/'):
self._base_url = self._base_url[:-1]
if self._base_url.endswith('vxstream-sandbox.com'):
self._base_url = self._base_url.replace('vxstream-sandbox.com', 'falcon-sandbox.com')
if 'https://' not in self._base_url:
self.save_progress('Warning: Using encrypted connection over https is strongly recommended.')
self._request_session = requests.Session()
return phantom.APP_SUCCESS
def handle_exception(self, exception):
self.set_status(phantom.APP_ERROR, 'Unexpected error has occurred')
return self.get_status()
def _get_file_dict(self, param, action_result):
vault_id = param['vault_id']
try:
if hasattr(Vault, 'get_file_path'):
payload = open(Vault.get_file_path(vault_id), 'rb')
else:
payload = open(Vault.get_vault_file(vault_id), 'rb')
except:
return action_result.set_status(phantom.APP_ERROR, 'File not found in vault ("{}")'.format(vault_id)), None
files = {'file': (param['file_name'], payload)}
return phantom.APP_SUCCESS, files
def _make_api_call(self, api_object):
config = self.get_config()
api_object.call(self._request_session, verify_server=config[PAYLOAD_SECURITY_VERIFY_SERVER_CERT])
def _make_api_call_with_err_handling(self, api_object, base_err_msg):
try:
self._make_api_call(api_object)
except requests.exceptions.RequestException as exc:
raise VxError('{} Connection to server failed. Error: \'{}\''.format(base_err_msg, str(exc)))
if api_object.if_request_success() is False:
raise VxError('{} {}'.format(base_err_msg, api_object.get_prepared_response_msg()))
return api_object
def _build_sample_url(self, id):
if ':' in id:
sha256, env_id = id.split(':')
url = '/sample/{}?environmentId={}'.format(sha256, env_id)
elif len(id) == 24:
url = '/sample/{}/find'.format(id)
else:
url = '/sample/{}'.format(id)
return '{}{}'.format(self._base_url, url)
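# Example (hypothetical IDs): '<sha256>:120' maps to
# <base>/sample/<sha256>?environmentId=120, a 24-character job ID maps to
# <base>/sample/<id>/find, and anything else to <base>/sample/<id>.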
def _check_status_partial(self, param):
config = self.get_config()
api_check_state = ApiReportState(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
api_check_state.attach_params({'id': param['id']})
return self._make_api_call_with_err_handling(api_check_state, 'Getting sample status failed.')
def _check_status(self, param):
self.save_progress(PAYLOAD_SECURITY_MSG_QUERYING)
action_result = self.add_action_result(ActionResult(dict(param)))
try:
api_check_state = self._check_status_partial(param)
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
api_response_json = api_check_state.get_response_json()
api_response_json['sample_url'] = self._build_sample_url(param['id'])
api_response_json['status'] = api_response_json['state']
api_response_json['error_msg'] = '' if 'error' not in api_response_json else api_response_json['error']
action_result.add_data(api_response_json)
action_result.set_summary(api_response_json)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully retrieved status of sample with ID: \'{}\''.format(param['id']))
def _check_url_hash_partial(self, param):
config = self.get_config()
api_object = ApiSubmitHashForUrl(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
api_object.attach_data(param)
self._make_api_call_with_err_handling(api_object, 'Getting url hash failed.')
return api_object.get_response_json()
def _check_url_hash(self, param):
self.save_progress(PAYLOAD_SECURITY_MSG_QUERYING)
action_result = self.add_action_result(ActionResult(dict(param)))
try:
api_response_json = self._check_url_hash_partial(param)
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
action_result.add_data(api_response_json)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully retrieved hash of url: \'{}\''.format(param['url']))
def _get_pcap(self, param):
param.update({'file_type': 'pcap'})
return self._get_file(param)
def _get_file(self, param):
config = self.get_config()
api_result_object = ApiReportFile(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
action_result = self.add_action_result(ActionResult(dict(param)))
self.save_progress(PAYLOAD_SECURITY_MSG_QUERYING)
api_result_object.attach_params({'id': param['id'], 'type': param['file_type']})
try:
self._make_api_call_with_err_handling(api_result_object, 'Getting file failed.')
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
api_response = api_result_object.get_api_response()
data = self._save_file_to_vault(action_result, api_response, api_response.headers['Vx-Filename'], param['id'], param['file_type'])
data['sample_url'] = self._build_sample_url(param['id'])
action_result.add_data(data)
action_result.set_summary(data)
return action_result.get_status()
def _get_file_from_url(self, param):
action_result = self.add_action_result(ActionResult(dict(param)))
disassembled = urlparse(param['url'])
filename, file_ext = splitext(basename(disassembled.path))
guid = uuid.uuid4()
local_dir = '/vault/tmp/{}'.format(guid)
self.save_progress("Using temp directory: {0}".format(guid))
try:
os.makedirs(local_dir)
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, "Unable to create temporary folder '/vault/tmp'.", e)
f_out_name = local_dir + '/online_file_{}_{}{}'.format(str(time.time()).replace('.', ''), filename, file_ext)
self.save_progress('Fetching data from given url')
file_resp = urllib.urlopen(param['url'])
f_out = open(f_out_name, 'wb')
f_out.write(file_resp.read())
f_out.close()
vault_ret_dict = Vault.add_attachment(f_out_name, self.get_container_id(), file_name=os.path.basename(f_out_name))
data = {}
if vault_ret_dict['succeeded']:
data = {
'vault_id': vault_ret_dict[phantom.APP_JSON_HASH],
'file_name': os.path.basename(f_out_name),
'file_type': file_ext[1:],
}
action_result.set_status(phantom.APP_SUCCESS)
else:
action_result.set_status(phantom.APP_ERROR, phantom.APP_ERR_FILE_ADD_TO_VAULT)
action_result.append_to_message(vault_ret_dict['message'])
shutil.rmtree(local_dir)
action_result.add_data(data)
action_result.set_summary(data)
return action_result.get_status()
def _save_file(self, directory, file_content, filename, suffix):
retrieved_filename_without_gz_ext, retrieved_file_extension = os.path.splitext(filename)
new_file_name = retrieved_filename_without_gz_ext if retrieved_file_extension == '.gz' else filename  # As we want to unpack it, use the filename without the '.gz' extension
f_out_name = directory + '/Falcon_{}_{}_{}'.format(str(time.time()).replace('.', ''), suffix.replace(':', '_'), new_file_name)
if retrieved_file_extension == '.gz':
f_out = open(f_out_name, 'wb')
try:
gzip_file_handle = gzip.GzipFile(fileobj=BytesIO(file_content))
f_out.write(gzip_file_handle.read())
except Exception as e:
f_out_name += retrieved_file_extension
f_out = open(f_out_name, 'wb')
f_out.write(file_content)
f_out.close()
f_out.close()
else:
f_out = open(f_out_name, 'wb')
f_out.write(file_content)
f_out.close()
return f_out_name
def _save_file_to_vault(self, action_result, response, filename, suffix, file_type):
# Create a tmp directory on the vault partition
guid = uuid.uuid4()
local_dir = '/vault/tmp/{}'.format(guid)
self.save_progress("Using temp directory: {0}".format(guid))
try:
os.makedirs(local_dir)
except Exception as e:
return action_result.set_status(phantom.APP_ERROR, "Unable to create temporary folder '/vault/tmp'.", e)
file_path = self._save_file(local_dir, response.content, filename, suffix)
# move the file to the vault
vault_ret_dict = Vault.add_attachment(file_path, self.get_container_id(), file_name=os.path.basename(file_path))
data = {}
if vault_ret_dict['succeeded']:
data = {
'vault_id': vault_ret_dict[phantom.APP_JSON_HASH],
'file_name': os.path.basename(file_path),
'file_type': file_type,
}
action_result.set_status(phantom.APP_SUCCESS)
else:
action_result.set_status(phantom.APP_ERROR, phantom.APP_ERR_FILE_ADD_TO_VAULT)
action_result.append_to_message(vault_ret_dict['message'])
shutil.rmtree(local_dir)
return data
def _get_report_partial(self, param):
config = self.get_config()
api_summary_object = ApiReportSummary(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
api_summary_object.attach_params(param)
self._make_api_call_with_err_handling(api_summary_object, 'Getting report failed.')
api_response_json = api_summary_object.get_response_json()
api_response_json['sample_url'] = self._build_sample_url(param['id'])
verdict_label_map = {
'malicious': 'danger',
'suspicious': 'warning',
'no specific threat': 'success',
'whitelisted': 'info',
'no verdict': 'default'
}
if api_response_json['verdict']:
api_response_json['verdict_label'] = verdict_label_map[api_response_json['verdict']]
return {'api_object': api_summary_object, 'prepared_json_response': api_response_json}
def _get_report(self, param):
self.save_progress(PAYLOAD_SECURITY_MSG_QUERYING)
action_result = self.add_action_result(ActionResult(dict(param)))
try:
partial_results = self._get_report_partial(param)
api_response_json = partial_results['prepared_json_response']
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
action_result.add_data(api_response_json)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully retrieved summary of sample with Id: \'{}\''.format(param['id']))
def _detonation_partial(self, param, detonation_api_object):
api_response_json = detonation_api_object.get_response_json()
if 'sha256' not in api_response_json:
raise VxError("Hash not found in API response. Please check spawn.log.")
sample_sha_256 = api_response_json['sha256']
sample_env_id = param['environment_id']
sample_id = '{}:{}'.format(sample_sha_256, sample_env_id)
sample_params = {
'id': sample_id
}
final_check_status_response = None
start_time_of_checking = time.time()
self.save_progress('Successfully submitted chosen element for detonation. Waiting {} seconds to do status checking...'.format(PAYLOAD_SECURITY_DETONATION_QUEUE_TIME_INTERVAL_SECONDS))
for x in range(0, PAYLOAD_SECURITY_DETONATION_QUEUE_NUMBER_OF_ATTEMPTS):
self.debug_print('detonate_debug_print_queue', 'Starting iteration {} of {}. Sleep time is {}.'.format(x, PAYLOAD_SECURITY_DETONATION_QUEUE_NUMBER_OF_ATTEMPTS,
PAYLOAD_SECURITY_DETONATION_QUEUE_TIME_INTERVAL_SECONDS))
time.sleep(PAYLOAD_SECURITY_DETONATION_QUEUE_TIME_INTERVAL_SECONDS)
api_check_state = self._check_status_partial(sample_params)
api_response_json = api_check_state.get_response_json()
final_check_status_response = api_response_json
if api_response_json['state'] == PAYLOAD_SECURITY_SAMPLE_STATE_IN_PROGRESS:
self.save_progress('Submitted element is processed. Waiting {} seconds to do status checking...'.format(PAYLOAD_SECURITY_DETONATION_PROGRESS_TIME_INTERVAL_SECONDS))
for y in range(0, PAYLOAD_SECURITY_DETONATION_PROGRESS_NUMBER_OF_ATTEMPTS):
self.debug_print('detonate_debug_print_progress', 'Starting iteration {} of {}. Sleep time is {}.'.format(y, PAYLOAD_SECURITY_DETONATION_PROGRESS_NUMBER_OF_ATTEMPTS,
PAYLOAD_SECURITY_DETONATION_PROGRESS_TIME_INTERVAL_SECONDS))
time.sleep(PAYLOAD_SECURITY_DETONATION_PROGRESS_TIME_INTERVAL_SECONDS)
api_check_state = self._check_status_partial(sample_params)
api_response_json = api_check_state.get_response_json()
final_check_status_response = api_response_json
self.save_progress(
PAYLOAD_SECURITY_MSG_CHECKED_STATE.format(api_response_json['state'], datetime.now().strftime("%Y-%m-%d %H:%M:%S"), y + 1,
PAYLOAD_SECURITY_DETONATION_PROGRESS_NUMBER_OF_ATTEMPTS,
PAYLOAD_SECURITY_DETONATION_PROGRESS_TIME_INTERVAL_SECONDS))
if api_response_json['state'] in [PAYLOAD_SECURITY_SAMPLE_STATE_SUCCESS, PAYLOAD_SECURITY_SAMPLE_STATE_ERROR]:
self.debug_print('detonate_debug_print_progress_result_status',
'Got state \'{}\' from \'{}\' state after \'{}\' seconds of work.'.format(api_response_json['state'], PAYLOAD_SECURITY_SAMPLE_STATE_IN_PROGRESS,
(time.time() - start_time_of_checking)))
break
else:  # for-else: runs only when the inner loop was not broken; otherwise fall through and break the outer loop too
continue
break
elif api_response_json['state'] == PAYLOAD_SECURITY_SAMPLE_STATE_ERROR:
self.debug_print('detonate_debug_print_queue_result_status',
'Got state \'{}\' from \'{}\' state after \'{}\' seconds of work.'.format(PAYLOAD_SECURITY_SAMPLE_STATE_ERROR, PAYLOAD_SECURITY_SAMPLE_STATE_IN_QUEUE,
(time.time() - start_time_of_checking)))
break
elif api_response_json['state'] == PAYLOAD_SECURITY_SAMPLE_STATE_SUCCESS:
break
else:
self.save_progress(
PAYLOAD_SECURITY_MSG_CHECKED_STATE.format(api_response_json['state'], datetime.now().strftime("%Y-%m-%d %H:%M:%S"), x + 1, PAYLOAD_SECURITY_DETONATION_QUEUE_NUMBER_OF_ATTEMPTS,
PAYLOAD_SECURITY_DETONATION_QUEUE_TIME_INTERVAL_SECONDS))
if final_check_status_response['state'] in [PAYLOAD_SECURITY_SAMPLE_STATE_IN_QUEUE, PAYLOAD_SECURITY_SAMPLE_STATE_IN_PROGRESS]:
raise VxError('Action reached the analysis timeout. Last state is \'{}\'. You can still observe the state using \'check status\' action and after successful analysis, retrieve results by \'get report\' action.'.format(final_check_status_response['state']))
elif final_check_status_response['state'] == PAYLOAD_SECURITY_SAMPLE_STATE_ERROR:
raise VxError('During the analysis, error has occurred: \'{}\'. For more possible information, please visit sample page({}) and/or Hybrid Analysis Knowledge Base.'.format(
final_check_status_response['error'], self._build_sample_url(sample_id)))
else:
self.save_progress(PAYLOAD_SECURITY_MSG_DETONATION_QUERYING_REPORT)
partial_results = self._get_report_partial({'id': sample_id})
return partial_results['prepared_json_response']
def _detonate_url(self, param):
config = self.get_config()
api_submit_file_object = ApiSubmitUrlForAnalysis(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
self.save_progress(PAYLOAD_SECURITY_MSG_SUBMITTING_FILE)
action_result = self.add_action_result(ActionResult(dict(param)))
api_submit_file_object.attach_data(param)
try:
self._make_api_call_with_err_handling(api_submit_file_object, 'URL submit failed.')
report_api_json_response = self._detonation_partial(param, api_submit_file_object)
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
action_result.add_data(report_api_json_response)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully submitted URL and retrieved analysis result. Sample sha256: \'{}\' and environment ID: \'{}\''.format(report_api_json_response['sha256'], param['environment_id']))
def _detonate_file(self, param):
config = self.get_config()
api_submit_file_object = ApiSubmitFile(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
self.save_progress(PAYLOAD_SECURITY_MSG_SUBMITTING_FILE)
action_result = self.add_action_result(ActionResult(dict(param)))
return_value, files = self._get_file_dict(param, action_result)
if phantom.is_fail(return_value):
return action_result.get_status()
api_submit_file_object.attach_files(files)
api_submit_file_object.attach_data({'environment_id': param['environment_id']})
try:
self._make_api_call_with_err_handling(api_submit_file_object, 'File submit failed.')
report_api_json_response = self._detonation_partial(param, api_submit_file_object)
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
action_result.add_data(report_api_json_response)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully submitted file and retrieved analysis result. Sample sha256: \'{}\' and environment ID: \'{}\''.format(report_api_json_response['sha256'], param['environment_id']))
def _detonate_online_file(self, param):
config = self.get_config()
api_submit_file_object = ApiSubmitOnlineFile(config[PAYLOAD_SECURITY_API_KEY], self._base_url, self)
self.save_progress(PAYLOAD_SECURITY_MSG_SUBMITTING_FILE)
action_result = self.add_action_result(ActionResult(dict(param)))
api_submit_file_object.attach_data(param)
try:
self._make_api_call_with_err_handling(api_submit_file_object, 'Online file submit failed.')
report_api_json_response = self._detonation_partial(param, api_submit_file_object)
except VxError as exc:
action_result.set_status(phantom.APP_ERROR, '{}'.format(str(exc)))
return action_result.get_status()
action_result.add_data(report_api_json_response)
return action_result.set_status(phantom.APP_SUCCESS, 'Successfully submitted file and retrieved analysis result. Sample sha256: \'{}\' and environment ID: \'{}\''.format(report_api_json_response['sha256'], param['environment_id']))
def _convert_verdict_name_to_key(self, verdict_name):
return verdict_name.replace(' ', '_')
def _hunt_similar(self, param):
return self._search_terms({'similar_to': param['sha256']})
def _hunt_file(self, param):
return self._search_terms(param)
def _hunt_hash(self,
# Decrement the IPv6 hop-limit; discard packets that have expired.
oo0o = struct.unpack("B", packet[7:8])[0]
if (oo0o == 0):
    dprint("IPv6 packet arrived with hop-limit 0, packet discarded")
    return (None)
elif (oo0o == 1):
    dprint("IPv6 packet {}, packet discarded".format(bold("ttl expiry", False)))
    return (None)

# Never encapsulate IPv6 link-local traffic.
if (oO0o0.is_ipv6_link_local()):
    dprint("Do not encapsulate IPv6 link-local packets")
    return (None)

oo0o -= 1
packet = packet[0:7] + struct.pack("B", oo0o) + packet[8::]
return (packet)
def lisp_mac_input(packet):
    return (packet)
def lisp_rate_limit_map_request ( source , dest ) :
if ( lisp_last_map_request_sent == None ) : return ( False )
O0OOI11i = lisp_get_timestamp ( )
oO000o0Oo00 = O0OOI11i - lisp_last_map_request_sent
OOoOoOOo0O = ( oO000o0Oo00 < LISP_MAP_REQUEST_RATE_LIMIT )
if 46 - 46: OoO0O00 * I1Ii111 . O0
if ( OOoOoOOo0O ) :
if ( source != None ) : source = source . print_address ( )
dest = dest . print_address ( )
dprint ( "Rate-limiting Map-Request for {} -> {}" . format ( source , dest ) )
if 86 - 86: i11iIiiIii . Ii1I / OoOoOO00 / I11i * i1IIi
return ( OOoOoOOo0O )
if 40 - 40: o0oOOo0O0Ooo
if 33 - 33: i11iIiiIii + I1Ii111 % I1ii11iIi11i - I1Ii111 * OoO0O00
if 1 - 1: II111iiii / I1IiiI + II111iiii % II111iiii - I1Ii111
if 24 - 24: I11i / Oo0Ooo / i1IIi + IiII
if 10 - 10: I11i - IiII / II111iiii / oO0o % O0 / I1Ii111
if 91 - 91: oO0o * OoOoOO00 + O0 % Oo0Ooo
if 62 - 62: iIii1I11I1II1 - i11iIiiIii % iIii1I11I1II1 . ooOoO0o / OOooOOo * OoOoOO00
def lisp_send_map_request ( lisp_sockets , lisp_ephem_port , seid , deid , rloc ) :
global lisp_last_map_request_sent
if 45 - 45: OOooOOo - OOooOOo % iII111i - IiII . O0
if 6 - 6: iIii1I11I1II1 * II111iiii / O0 % IiII - I1Ii111
if 64 - 64: ooOoO0o
if 28 - 28: i11iIiiIii - IiII * I1ii11iIi11i + IiII * iII111i
if 75 - 75: o0oOOo0O0Ooo * OoOoOO00 % I1ii11iIi11i + OOooOOo . II111iiii
if 12 - 12: ooOoO0o
o0O0oOO0o0 = i1iIi1IIIiI1 = None
if ( rloc ) :
o0O0oOO0o0 = rloc . rloc
i1iIi1IIIiI1 = rloc . translated_port if lisp_i_am_rtr else LISP_DATA_PORT
if 5 - 5: OoO0O00 / I1Ii111
if 78 - 78: OoOoOO00 / IiII
if 92 - 92: OoOoOO00 / I11i / I1Ii111
if 2 - 2: IiII - iIii1I11I1II1
if 54 - 54: i11iIiiIii . Ii1I % I1IiiI . I1Ii111 . OoooooooOO
I1I1I1i1II1 , i1iI1IIIi1iIii1 , OoO0o0OOOO = lisp_myrlocs
if ( I1I1I1i1II1 == None ) :
lprint ( "Suppress sending Map-Request, IPv4 RLOC not found" )
return
if 64 - 64: OoOoOO00
if ( i1iI1IIIi1iIii1 == None and o0O0oOO0o0 != None and o0O0oOO0o0 . is_ipv6 ( ) ) :
lprint ( "Suppress sending Map-Request, IPv6 RLOC not found" )
return
if 20 - 20: OoOoOO00 / O0 * OOooOOo % I11i + OoO0O00 + o0oOOo0O0Ooo
if 51 - 51: Ii1I - OoOoOO00 / i11iIiiIii + O0
o00oo00OOOO = lisp_map_request ( )
o00oo00OOOO . record_count = 1
o00oo00OOOO . nonce = lisp_get_control_nonce ( )
o00oo00OOOO . rloc_probe = ( o0O0oOO0o0 != None )
if 71 - 71: ooOoO0o
if 35 - 35: OoOoOO00
if 55 - 55: iII111i - o0oOOo0O0Ooo + IiII * II111iiii
if 6 - 6: I1Ii111 / i1IIi / IiII . o0oOOo0O0Ooo
if 69 - 69: ooOoO0o - OoOoOO00 . I1IiiI . I11i + OoOoOO00 / i11iIiiIii
if 20 - 20: OoO0O00 . OoooooooOO - ooOoO0o . I11i / Oo0Ooo
if 89 - 89: iIii1I11I1II1 . ooOoO0o
if ( rloc ) : rloc . last_rloc_probe_nonce = o00oo00OOOO . nonce
if 82 - 82: OoOoOO00 - II111iiii . OoO0O00 * ooOoO0o
iiI1 = deid . is_multicast_address ( )
if ( iiI1 ) :
o00oo00OOOO . target_eid = seid
o00oo00OOOO . target_group = deid
else :
o00oo00OOOO . target_eid = deid
if 78 - 78: OoOoOO00 % oO0o
if 39 - 39: iIii1I11I1II1
if 72 - 72: II111iiii + I1Ii111 / Ii1I * iIii1I11I1II1
if 95 - 95: OoooooooOO + OOooOOo + II111iiii + IiII + OoO0O00
if 86 - 86: II111iiii / iII111i - I1ii11iIi11i
if 65 - 65: I1ii11iIi11i + OoOoOO00
if 43 - 43: O0 + I11i % II111iiii
if 56 - 56: IiII + Oo0Ooo . IiII % iIii1I11I1II1 % ooOoO0o % ooOoO0o
if 70 - 70: ooOoO0o / i1IIi - I11i - i11iIiiIii
if ( o00oo00OOOO . rloc_probe == False ) :
Ooooo00 = lisp_get_signature_eid ( )
if ( Ooooo00 ) :
o00oo00OOOO . signature_eid . copy_address ( Ooooo00 . eid )
o00oo00OOOO . privkey_filename = "./lisp-sig.pem"
if 79 - 79: OoO0O00 - OoooooooOO % iII111i . O0
if 93 - 93: I1Ii111
if 3 - 3: OoO0O00 / IiII - oO0o / oO0o
if 50 - 50: II111iiii + OoOoOO00
if 17 - 17: ooOoO0o + I1ii11iIi11i
if 34 - 34: Ii1I / II111iiii + OoOoOO00 . II111iiii + OoooooooOO * o0oOOo0O0Ooo
if ( seid == None or iiI1 ) :
o00oo00OOOO . source_eid . afi = LISP_AFI_NONE
else :
o00oo00OOOO . source_eid = seid
if 48 - 48: O0
if 99 - 99: II111iiii * oO0o / I1ii11iIi11i - i1IIi
if 84 - 84: i11iIiiIii . OoooooooOO
if 69 - 69: I1Ii111 * II111iiii % I1Ii111 * i11iIiiIii . ooOoO0o / Oo0Ooo
if 5 - 5: Ii1I
if 19 - 19: oO0o
if 61 - 61: OoOoOO00 + iIii1I11I1II1 / I1ii11iIi11i - i1IIi
if 11 - 11: oO0o * o0oOOo0O0Ooo . I1IiiI
if 12 - 12: I1IiiI % OoO0O00 / I1Ii111 / O0 % o0oOOo0O0Ooo
if 1 - 1: OoOoOO00 / I11i
if 43 - 43: o0oOOo0O0Ooo - i1IIi / Ii1I . OoOoOO00 + i11iIiiIii
if 69 - 69: i11iIiiIii - iIii1I11I1II1
if ( o0O0oOO0o0 != None and lisp_nat_traversal and lisp_i_am_rtr == False ) :
if ( o0O0oOO0o0 . is_private_address ( ) == False ) :
I1I1I1i1II1 = lisp_get_any_translated_rloc ( )
if 40 - 40: I1IiiI / oO0o + | |
# <NAME>
# This module provides classical simulations of basic quantum algorithms (once you write them).
# In numpy, I think that the default complex dtype varies from platform to platform. If you want to explicitly use the default type in your code, use one.dtype (where one is defined just below).
import random
import math
import numpy as np
### CONSTANTS ###
# We haven't discussed this trivial case, but a 0-qbit state or gate is the complex scalar 1, represented as the following object. Notice that this object is neither the column vector numpy.array([1 + 0j]) nor the matrix numpy.array([[1 + 0j]]).
one = np.array(1 + 0j)
# Our favorite one-qbit states.
ket0 = np.array([1 + 0j, 0 + 0j])
ket1 = np.array([0 + 0j, 1 + 0j])
ketPlus = np.array([1 / math.sqrt(2), 1 / math.sqrt(2)])
ketMinus = np.array([1 / math.sqrt(2), -1 / math.sqrt(2)])
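# Note: tensoring with the 0-qbit scalar is the identity; for example,
# np.kron(one, ket0) should equal ket0.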
# Our favorite one-qbit gates.
iden = np.array([[1 + 0j, 0 + 0j], [0 + 0j, 1 + 0j]])
x = np.array([[0 + 0j, 1 + 0j], [1 + 0j, 0 + 0j]])
y = np.array([[0 + 0j, 0 - 1j], [0 + 1j, 0 + 0j]])
z = np.array([[1 + 0j, 0 + 0j], [0 + 0j, -1 + 0j]])
h = np.array([[1 / math.sqrt(2) + 0j, 1 / math.sqrt(2) + 0j],
[1 / math.sqrt(2) + 0j, -1 / math.sqrt(2) + 0j]])
# Our favorite two-qbit gates.
cnot = np.array([[1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j],
[0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j]])
swap = np.array([[1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j],
[0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j]])
# Our favorite three-qbit gates.
toffoli = np.array([[1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j, 0 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j],
[0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 0 + 0j, 1 + 0j, 0 + 0j]])
### BIT STRINGS ###
# We represent an n-bit string --- that is, an element of {0, 1}^n --- in Python as a tuple of 0s and 1s.
def string(n, m):
'''Converts a non-negative Python integer m to its corresponding bit string. As necessary, pads with leading 0s to bring the number of bits up to n.'''
s = ()
while m >= 1:
s = (m % 2,) + s
m = m // 2
s = (n - len(s)) * (0,) + s
return s
def integer(s):
'''Converts a bit string s to its corresponding non-negative Python integer.'''
m = 0
for k in range(len(s)):
m = 2 * m + s[k]
return m
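# string and integer are mutually inverse; for example, string(3, 5) ==
# (1, 0, 1) and integer((1, 0, 1)) == 5.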
def norm(c):
    '''Returns the norm (modulus) of the complex number c.'''
    return math.sqrt(math.pow(c.real, 2) + math.pow(c.imag, 2))
def next(s):
'''Given an n-bit string s, returns the next n-bit string. The order is lexicographic, except that there is a string after 1...1, namely 0...0.'''
k = len(s) - 1
while k >= 0 and s[k] == 1:
k -= 1
if k < 0:
return len(s) * (0,)
else:
return s[:k] + (1,) + (len(s) - k - 1) * (0,)
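# For example, next((0, 1, 1)) == (1, 0, 0), and next((1, 1, 1)) wraps
# around to (0, 0, 0).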
def firstTest():
# Constructs an unentangled two-qbit state |0> |psi> or |1> |psi>,
# measures the first qbit, and then reconstructs the state.
print("One should see 0s.")
psi = uniform(1)
state = tensor(ket0, psi)
meas = first(state)
print(state - tensor(meas[0], meas[1]))
psi = uniform(1)
state = tensor(ket1, psi)
meas = first(state)
print(state - tensor(meas[0], meas[1]))
def lastTest():
# Constructs an unentangled two-qbit state |0> |psi> or |1> |psi>,
    # measures the last qbit, and then reconstructs the state.
print("One should see 0s.")
psi = uniform(1)
state = tensor(ket0, psi)
meas = last(state)
print(state - tensor(meas[0], meas[1]))
psi = uniform(1)
state = tensor(ket1, psi)
meas = last(state)
print(state - tensor(meas[0], meas[1]))
def nextTest(n):
'''A unit test for some basic bit-string routines. Should print the integers from 0 to 2^n - 1.'''
s = string(n, 0)
m = integer(s)
print(m)
s = next(string(n, m))
while s != n * (0,):
m = integer(s)
print(m)
s = next(string(n, m))
def addition(s, t):
"""Returns the mod-2 sum of two n-bit strings s and t."""
return tuple([(s[i] + t[i]) % 2 for i in range(len(s))])
def dot(s, t):
"""Returns the mod-2 dot product of two n-bit strings s and t."""
return sum([s[i] * t[i] for i in range(len(s))]) % 2
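# For example, addition((1, 0, 1), (0, 1, 1)) == (1, 1, 0) and
# dot((1, 0, 1), (0, 1, 1)) == 1.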
def reduction(a):
"""A is a list of m >= 1 bit strings of equal dimension n >= 1. In other words, A is a non-empty m x n binary
matrix. Returns the reduced row-echelon form of A. A itself is left unaltered. """
b = a.copy()
m = len(b)
n = len(b[0])
rank = 0
for j in range(n):
# Try to swap two rows to make b[rank, j] a leading 1.
i = rank
while i < m and b[i][j] == 0:
i += 1
if i != m:
# Perform the swap.
temp = b[i]
b[i] = b[rank]
b[rank] = temp
# Reduce all leading 1s below the one we just made.
for i in range(rank + 1, m):
if b[i][j] == 1:
b[i] = addition(b[i], b[rank])
rank += 1
for j in range(n - 1, -1, -1):
# Try to find the leading 1 in column j.
i = m - 1
while i >= 0 and b[i][j] != 1:
i -= 1
if i >= 0:
# Use the leading 1 at b[i, j] to reduce 1s above it.
for k in range(i):
if b[k][j] == 1:
b[k] = addition(b[k], b[i])
return b
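# For example, reduction([(1, 1), (1, 0)]) == [(1, 0), (0, 1)]: the rows
# are linearly independent over GF(2), so the result is the identity.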
def function(n, m, f):
"""Assumes that n = m = 1. The argument f is a Python function that takesas input an n-bit string alpha and
returns as output an m-bit string f(alpha). See deutschTest for examples of f. This function returns the (n +
m)-qbit gate F that corresponds to f. """
F = np.zeros((2**(n+m),2**(n+m)), dtype=one.dtype)
for a in range(0,2**n):
for b in range(0,2**m):
alpha = string(n,a)
beta = string(m,b)
beta_new = addition(beta,f(alpha))
row_bits = alpha + beta_new
col_bits = alpha + beta
F[integer(row_bits)][integer(col_bits)] = 1 + 0j
return F
def deutsch(f):
"""Given a two-qbit gate representing a function f : {0, 1} -> {0, 1},outputs ket1 if f is constant and ket0 if f
is not constant. """
return first(np.dot(tensor(h, h), np.dot(f, np.dot(tensor(h, h), tensor(ket1, ket1)))))[0]
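# Why this works: the second register stays in |->, so F kicks the phase
# (-1)^f(alpha) onto the first register; the final Hadamard then sends a
# constant f to |1> and a balanced f to |0>, up to global phase.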
def deutschTest():
print("One should see ket0s")
def f(x):
        return (1 - x[0],)
print(deutsch(function(1, 1, f)))
def f(x):
return x
print(deutsch(function(1, 1, f)))
print("One should see ket1s")
def f(x):
return (0,)
print(deutsch(function(1, 1, f)))
def f(x):
return (1,)
print(deutsch(function(1, 1, f)))
def application(gate, state):
"""Assumes n >= 1. Applies the n-qbit gate to the n-qbit state, returning
an n-qbit state."""
| |
            if i+1 < len(node.jointnames):
code += ', '
else:
code += ';\n'
code += 'int numsolutions = 0;\n'
code += 'for(int i%s = 0; i%s < numroots; i%s += %d)\n{\n'%(firstname,firstname,firstname,len(node.jointnames))
fcode = 'IkReal '
for i in range(len(node.exportvar)):
fcode += '%s = zeror[i%s+%d]'%(node.exportvar[i],firstname,i)
if i+1<len(node.exportvar):
fcode += ', '
else:
fcode += ';\n'
origequations = self.copyequations()
fcode += self.writeEquations(lambda i: '%sarray[numsolutions]'%(node.jointnames[i]), node.jointeval)
fcode += self.writeEquations(lambda i: 'c%sarray[numsolutions]'%(node.jointnames[i]), node.jointevalcos)
fcode += self.writeEquations(lambda i: 's%sarray[numsolutions]'%(node.jointnames[i]), node.jointevalsin)
self.dictequations = origequations
for i in range(len(node.jointnames)):
if node.isHinges[i]:
fcode += 'if( %sarray[numsolutions] > IKPI )\n{\n %sarray[numsolutions]-=IK2PI;\n}\nelse if( %sarray[numsolutions] < -IKPI )\n{\n %sarray[numsolutions]+=IK2PI;\n}\n'%(node.jointnames[i],node.jointnames[i],node.jointnames[i],node.jointnames[i])
fcode += 'numsolutions++;\n'
# fcode += 'bool valid = true;\n'
# # test all the solutions up to now for validity
# fcode += 'for( int k%s = 0; k%s < numsolutions; ++k%s)\n{\n'%(firstname,firstname,firstname)
# fcode += ' if( '
# for name in node.jointnames:
# fcode += 'IKabs(c%sarray[k%s]-c%sarray[numsolutions]) < IKFAST_SOLUTION_THRESH && IKabs(s%sarray[k%s]-s%sarray[numsolutions]) < IKFAST_SOLUTION_THRESH &&'%(name,firstname,name,name, firstname,name)
# fcode += ' 1 )\n {\n valid=false; break;\n }\n'
# fcode += '}\n'
# fcode += 'if( valid ) { numsolutions++; }\n'
fcode += '}\n'
code += self.indentCode(fcode,4)
code += 'bool %svalid[%d]={%s};\n'%(firstname,node.rootmaxdim,','.join(['true']*node.rootmaxdim))
code += '_n%s = %d;\n'%(firstname,node.rootmaxdim)
for name in node.jointnames[1:]:
code += '_n%s = 1;\n'%name
if node.rootmaxdim >= 256:
log.error('num solutions is %d>=256, which exceeds unsigned char',node.rootmaxdim)
code += 'for(int i%s = 0; i%s < numsolutions; ++i%s)\n {\n'%(firstname,firstname,firstname)
code += 'if( !%svalid[i%s] )\n{\n continue;\n}\n'%(firstname,firstname)
code += '_i%s[0] = i%s; _i%s[1] = -1;\n'%(firstname,firstname,firstname)
for name in node.jointnames[1:]:
code += '_i%s[0] = 0; _i%s[1] = -1;\n'%(name,name)
# check for a similar solution
code += 'for(int ii%s = i%s+1; ii%s < numsolutions; ++ii%s)\n{\n'%(firstname,firstname,firstname,firstname)
code += 'if( !%svalid[ii%s] ) { continue; }\n'%(firstname,firstname)
code += 'if( '
for name in node.jointnames:
code += 'IKabs(c%sarray[i%s]-c%sarray[ii%s]) < IKFAST_SOLUTION_THRESH && IKabs(s%sarray[i%s]-s%sarray[ii%s]) < IKFAST_SOLUTION_THRESH && '%(name,firstname,name,firstname,name,firstname,name,firstname)
code += ' 1 )\n{\n %svalid[ii%s]=false; '%(firstname,firstname)
code += '_i%s[1] = ii%s; '%(firstname,firstname)
for name in node.jointnames[1:]:
code += '_i%s[1] = 0; '%name
code += ' break; \n}\n'
code += '}\n'
for name in node.jointnames:
code += ' %s = %sarray[i%s]; c%s = c%sarray[i%s]; s%s = s%sarray[i%s];\n\n'%(name,name,firstname,name,name,firstname,name,name,firstname)
return code
def endCoeffFunction(self, node):
return ' }\n'
def generateMatrixInverse(self, node):
# lapack takes matrices in column order
assert( node.A.shape[0] == node.A.shape[1] )
matrixinverse=self.using_matrixinverse()
code = ''
# for some reason things work even if the determinant is 0....
# if len(node.checkforzeros) > 0:
# code = 'IkReal matrixcondition[%d];\n'%(len(node.checkforzeros))
# code += self.writeEquations(lambda i: 'matrixcondition[%d]'%(i),node.checkforzeros)
# code += 'if( '
# for i in range(len(node.checkforzeros)):
# if i != 0:
# code += ' || '
# code += 'IKabs(matrixcondition[%d]) < 1e-14 '%(i)
# code += ' )\n{ continue;\n}\n'
code += 'IkReal IKFAST_ALIGNED16(matrixinvcoeffs[%d]);\n'%(node.A.shape[0]*node.A.shape[1])
code += self.writeEquations(lambda i: 'matrixinvcoeffs[%d]'%(i),node.A.transpose()[:])
        code += 'if( !%s<%d>(matrixinvcoeffs) ) {\ncontinue;\n}\n'%(matrixinverse,node.A.shape[0])
# create the variables
mcode = ''
for i in range(len(node.Asymbols)):
for j in range(len(node.Asymbols[i])):
if node.Asymbols[i][j] is not None:
if len(mcode) > 0:
mcode += ', '
else:
mcode = 'IkReal '
mcode += '%s=matrixinvcoeffs[%d]'%(node.Asymbols[i][j],i+j*node.A.shape[0])
if len(mcode)> 0:
code += mcode + ';\n'
return code
def endMatrixInverse(self,node):
return ''
def generateBranchConds(self, node):
origequations = self.copyequations()
code = '{\n'
numevals = None
for branch in node.jointbranches:
if branch[0] is not None:
if numevals is None or numevals < len(branch[0]):
numevals=len(branch[0])
if numevals is not None:
code += 'IkReal evalcond[%d];\n'%numevals
for branch in node.jointbranches:
self.dictequations = self.copyequations(origequations)
if branch[0] is None:
branchcode = 'if( 1 )\n{\n'
else:
branchcode = self.writeEquations(lambda x: 'evalcond[%d]'%x,branch[0])
branchcode += 'if( '
for i in range(len(branch[0])):
if i != 0:
branchcode += ' && '
branchcode += 'IKabs(evalcond[%d]) < %.16f '%(i,node.thresh)
branchcode += ' )\n{\n'
for n in branch[1]:
branchcode += n.generate(self)
for n in reversed(branch[1]):
branchcode += n.end(self)
code += self.indentCode(branchcode,4)+'\n} else\n{\n'
code += '}\n'*(len(node.jointbranches)+1)
self.dictequations = origequations
return code
def endBranchConds(self, node):
return ''
def generateCheckZeros(self, node):
origequations = self.copyequations()
        name = node.jointname if node.jointname is not None else 'dummy'
code = 'IkReal %seval[%d];\n'%(name,len(node.jointcheckeqs))
for var,value in node.dictequations:
code += 'IkReal %s;\n'%var
code += self.writeEquations(lambda k: var,value)
code += self.writeEquations(lambda i: '%seval[%d]'%(name,i),node.jointcheckeqs)
if len(node.jointcheckeqs) > 0:
code += 'if( '
for i in range(len(node.jointcheckeqs)):
if i != 0:
if node.anycondition:
code += ' || '
else:
code += ' && '
code += 'IKabs(%seval[%d]) < %.16f '%(name,i,node.thresh)
code += ' )\n{\n'
self.dictequations = self.copyequations(origequations)
code += self.indentCode(self.generateTree(node.zerobranch),4)
code += '\n} else\n'
code += '{\n'
self.dictequations = self.copyequations(origequations)
code += self.indentCode(self.generateTree(node.nonzerobranch),4)
code += '\n}\n'
self.dictequations = origequations
return '{\n' + self.indentCode(code,4) + '\n}\n'
def endCheckZeros(self, node):
return ''
def generateFreeParameter(self, node):
#print 'free variable ',node.jointname,': ',self.freevars
self.freevars.append(node.jointname)
self.freevardependencies.append((node.jointname,node.jointname))
code = 'IkReal %smul = 1;\n%s=0;\n'%(node.jointname,node.jointname)
return code+self.generateTree(node.jointtree)
def endFreeParameter(self, node):
self.freevars.pop()
self.freevardependencies.pop()
return ''
def generateBreak(self,node):
return 'continue;\n'
def endBreak(self,node):
return ''
def generateRotation(self, node):
if not node.functionid in self.functions:
code = 'inline void rotationfunction%d(IkSolutionListBase<IkReal>& solutions) {\n'%(node.functionid)
code += 'for(int rotationiter = 0; rotationiter < 1; ++rotationiter) {\n'
origequations = self.dictequations
self.resetequations()
listequations = []
names = []
for i in range(3):
for j in range(3):
listequations.append(node.T[i,j])
names.append(Symbol('new_r%d%d'%(i,j)))
code += self.indentCode(self.writeEquations(lambda i: names[i],listequations),4)
code += self.indentCode(self.generateTree(node.jointtree),4)
code += '}\n}'
self.dictequations = origequations
self.functions[node.functionid] = code
return 'rotationfunction%d(solutions);\n'%(node.functionid)
def endRotation(self, node):
return ''
def generateDirection(self, node):
code = ''
listequations = []
names = []
for i in range(3):
listequations.append(node.D[i])
names.append(Symbol('new_r%d%d'%(0,i)))
code += self.writeEquations(lambda i: names[i],listequations)
code += self.generateTree(node.jointtree)
return code
def endDirection(self, node):
return ''
def generateStoreSolution(self, node):
code = ''
if node.checkgreaterzero is not None and len(node.checkgreaterzero) > 0:
origequations = self.copyequations()
code += 'IkReal soleval[%d];\n'%(len(node.checkgreaterzero))
code += self.writeEquations(lambda i: 'soleval[%d]'%(i),node.checkgreaterzero)
code += 'if( '
for i in range(len(node.checkgreaterzero)):
if i != 0:
code += ' && '
code += 'soleval[%d] > %.16f '%(i,node.thresh)
code += ' )\n'
self.dictequations = origequations
code += '{\n'
code += 'std::vector<IkSingleDOFSolutionBase<IkReal> > vinfos(%d);\n'%len(node.alljointvars)
for i,var in enumerate(node.alljointvars):
offsetvalue = '+%.15e'%node.offsetvalues[i] if node.offsetvalues is not None else ''
code += 'vinfos[%d].jointtype = %d;\n'%(i,0x01 if node.isHinge[i] else 0x11)
code += 'vinfos[%d].foffset = %s%s;\n'%(i,var,offsetvalue)
vardeps = [vardep for vardep in self.freevardependencies if vardep[1]==var.name]
if len(vardeps) > 0:
freevarname = vardeps[0][0]
ifreevar = [j for j in range(len(self.freevars)) if freevarname==self.freevars[j]]
code += 'vinfos[%d].fmul = %smul;\n'%(i,var.name)
code += 'vinfos[%d].freeind = %d;\n'%(i,ifreevar[0])
code += 'vinfos[%d].maxsolutions = 0;\n'%(i)
else:
code += 'vinfos[%d].indices[0] = _i%s[0];\n'%(i,var)
code += 'vinfos[%d].indices[1] = _i%s[1];\n'%(i,var)
code += 'vinfos[%d].maxsolutions = _n%s;\n'%(i,var)
code += 'std::vector<int> vfree(%d);\n'%len(self.freevars)
for i,varname in enumerate(self.freevars):
ind = [j for j in range(len(node.alljointvars)) if varname==node.alljointvars[j].name]
code += 'vfree[%d] = %d;\n'%(i,ind[0])
code += 'solutions.AddSolution(vinfos,vfree);\n'
code += '}\n'
return code
def endStoreSolution(self, node):
return ''
def generateSequence(self, node):
code = ''
for tree in node.jointtrees:
code += self.generateTree(tree)
return code
def endSequence(self, node):
return ''
def generateTree(self,tree):
code = ''
for n in tree:
code += n.generate(self)
for n in reversed(tree):
code += n.end(self)
return code
def writeEquations(self, varnamefn, allexprs):
if not hasattr(allexprs,'__iter__') and not hasattr(allexprs,'__array__'):
allexprs = [allexprs]
code = ''
# calling cse on many long expressions will freeze it, so try to divide the problem
complexitysubs = [(Symbol('POW'),1),(Symbol('ADD'),1),(Symbol('MUL'),1)]
complexity = [expr.count_ops().subs(complexitysubs) for expr in allexprs]
complexitythresh = 4000
exprs = []
curcomplexity = 0
for i,expr in enumerate(allexprs):
curcomplexity += complexity[i]
exprs.append(expr)
if curcomplexity > complexitythresh or i == len(allexprs)-1:
code += self._writeEquations(varnamefn,exprs,i+1-len(exprs))
exprs = []
curcomplexity = 0
assert(len(exprs)==0)
return code
def _writeEquations(self, varnamefn, exprs,ioffset):
code = ''
replacements,reduced_exprs = customcse(exprs,symbols=self.symbolgen)
N = len(self.dictequations[0])
complexitysubs = [(Symbol('POW'),1),(Symbol('ADD'),1),(Symbol('MUL'),1)]
for rep in replacements:
comparerep = rep[1].subs(self.dictequations[0]).expand()
found = False
complexity = rep[1].count_ops().subs(complexitysubs)
maxcomplexity = 3 if N > 1000 else 2
if complexity > maxcomplexity: # check only long expressions
for i in range(N):
if self.dictequations[1][i] is not None and comparerep-self.dictequations[1][i]==S.Zero:
#self.dictequations.append((rep[0],self.dictequations[0][i][0],self.dictequations[1][i]))
code += 'IkReal %s=%s;\n'%(rep[0],self.dictequations[0][i][0])
found = True
break
else:
comparerep = None
if not found:
self.dictequations[0].append(rep)
self.dictequations[1].append(comparerep)
code2,sepcode2 = self.writeExprCode(rep[1])
code += sepcode2+'IkReal %s=%s;\n'%(rep[0],code2)
for i,rexpr in enumerate(reduced_exprs):
code2,sepcode2 = self.writeExprCode(rexpr)
code += sepcode2+'%s=%s;\n'%(varnamefn(i+ioffset), code2)
return code
def writeExprCode(self, expr):
# go through all arguments and chop them
code = ''
sepcode = ''
if expr.is_Function:
if expr.func == abs:
code += 'IKabs('
code2,sepcode = self.writeExprCode(expr.args[0])
code += code2
        elif expr.func ==
    source = to_gpu(args.cuda, Variable(source))
labels = to_gpu(args.cuda, Variable(torch.zeros(source.size(0)).fill_(whichclass-1)))
# Train
code = autoencoder(0, source, lengths, noise=False, encode_only=True).detach()
scores = classifier(code[:,nhidden_irr:]) if rel else classifier(code[:,:nhidden_irr])
classify_loss = F.binary_cross_entropy(scores.squeeze(1), labels)
classify_loss.backward()
optimizer_classify.step()
classify_loss = classify_loss.cpu().item()
pred = scores.data.round().squeeze(1)
accuracy = pred.eq(labels.data).float().mean()
return classify_loss, accuracy
def grad_hook_cla(grad):
return grad * args.lambda_class
def classifier_regularize(whichclass, batch):
autoencoder.train()
autoencoder.zero_grad()
source, target, lengths = batch
source = to_gpu(args.cuda, Variable(source))
target = to_gpu(args.cuda, Variable(target))
flippedclass = abs(2-whichclass)
labels = to_gpu(args.cuda, Variable(torch.zeros(source.size(0)).fill_(flippedclass)))
# Train
code = autoencoder(0, source, lengths, noise=False, encode_only=True)
code.register_hook(grad_hook_cla)
scores = classifier(code[:,:nhidden_irr])
classify_reg_loss = F.binary_cross_entropy(scores.squeeze(1), labels)
if args.motivator is not None:
motivator_scores = motivator(code[:, nhidden_irr:])
motivator_reg_loss = F.binary_cross_entropy(motivator_scores.squeeze(1), labels)
classify_reg_loss = classify_reg_loss - args.motivator * motivator_reg_loss
if args.surrogate_joint is not None:
out_gold = victim_classifier(source).detach()
out_surrogate = surrogate(code)
surrogate_reg_loss = F.binary_cross_entropy(out_surrogate, out_gold)
classify_reg_loss = classify_reg_loss + args.surrogate_joint * surrogate_reg_loss
classify_reg_loss.backward()
torch.nn.utils.clip_grad_norm(autoencoder.parameters(), args.clip)
optimizer_ae.step()
return classify_reg_loss
def evaluate_autoencoder(whichdecoder, data_source, epoch):
# Turn on evaluation mode which disables dropout.
autoencoder.eval()
total_loss = 0
ntokens = len(corpus.dictionary.word2idx)
all_accuracies = 0
bcnt = 0
for i, batch in enumerate(data_source):
source, target, lengths = batch
source = to_gpu(args.cuda, Variable(source, volatile=True))
target = to_gpu(args.cuda, Variable(target, volatile=True))
mask = target.gt(0)
masked_target = target.masked_select(mask)
# examples x ntokens
output_mask = mask.unsqueeze(1).expand(mask.size(0), ntokens)
hidden = autoencoder(0, source, lengths, noise=False, encode_only=True)
# output: batch x seq_len x ntokens
if whichdecoder == 1:
output = autoencoder(1, source, lengths, noise=False)
flattened_output = output.view(-1, ntokens)
masked_output = \
flattened_output.masked_select(output_mask).view(-1, ntokens)
# accuracy
max_vals1, max_indices1 = torch.max(masked_output, 1)
all_accuracies += \
torch.mean(max_indices1.eq(masked_target).float()).item()
max_values1, max_indices1 = torch.max(output, 2)
max_indices2 = autoencoder.generate(2, hidden, maxlen=50)
else:
output = autoencoder(2, source, lengths, noise=False)
flattened_output = output.view(-1, ntokens)
masked_output = \
flattened_output.masked_select(output_mask).view(-1, ntokens)
# accuracy
max_vals2, max_indices2 = torch.max(masked_output, 1)
all_accuracies += \
torch.mean(max_indices2.eq(masked_target).float()).item()
max_values2, max_indices2 = torch.max(output, 2)
max_indices1 = autoencoder.generate(1, hidden, maxlen=50)
total_loss += criterion_ce(masked_output/args.temp, masked_target).item()
bcnt += 1
aeoutf_from = "{}/{}_output_decoder_{}_from.txt".format(args.outf, epoch, whichdecoder)
aeoutf_tran = "{}/{}_output_decoder_{}_tran.txt".format(args.outf, epoch, whichdecoder)
with open(aeoutf_from, 'w') as f_from, open(aeoutf_tran,'w') as f_trans:
max_indices1 = \
max_indices1.view(output.size(0), -1).data.cpu().numpy()
max_indices2 = \
max_indices2.view(output.size(0), -1).data.cpu().numpy()
target = target.view(output.size(0), -1).data.cpu().numpy()
tran_indices = max_indices2 if whichdecoder == 1 else max_indices1
for t, tran_idx in zip(target, tran_indices):
# real sentence
chars = " ".join([corpus.dictionary.idx2word[x] for x in t])
f_from.write(chars)
f_from.write("\n")
# transfer sentence
chars = " ".join([corpus.dictionary.idx2word[x] for x in tran_idx])
f_trans.write(chars)
f_trans.write("\n")
return total_loss / len(data_source), all_accuracies/bcnt
def evaluate_generator(whichdecoder, noise, epoch):
gan_gen.eval()
autoencoder.eval()
# generate from fixed random noise
fake_hidden = gan_gen(noise)
max_indices = \
autoencoder.generate(whichdecoder, fake_hidden, maxlen=50, sample=args.sample)
with open("%s/%s_generated%d.txt" % (args.outf, epoch, whichdecoder), "w") as f:
max_indices = max_indices.data.cpu().numpy()
for idx in max_indices:
# generated sentence
words = [corpus.dictionary.idx2word[x] for x in idx]
# truncate sentences to first occurrence of <eos>
truncated_sent = []
for w in words:
if w != '<eos>':
truncated_sent.append(w)
else:
break
chars = " ".join(truncated_sent)
f.write(chars)
f.write("\n")
def train_ae(whichdecoder, batch, total_loss_ae, start_time, i):
autoencoder.train()
optimizer_ae.zero_grad()
source, target, lengths = batch
source = to_gpu(args.cuda, Variable(source))
target = to_gpu(args.cuda, Variable(target))
mask = target.gt(0)
masked_target = target.masked_select(mask)
output_mask = mask.unsqueeze(1).expand(mask.size(0), ntokens)
output = autoencoder(whichdecoder, source, lengths, noise=True)
flat_output = output.view(-1, ntokens)
masked_output = flat_output.masked_select(output_mask).view(-1, ntokens)
loss = criterion_ce(masked_output/args.temp, masked_target)
loss.backward()
# `clip_grad_norm` to prevent exploding gradient in RNNs / LSTMs
torch.nn.utils.clip_grad_norm(autoencoder.parameters(), args.clip)
optimizer_ae.step()
total_loss_ae += loss.data
accuracy = None
if i % args.log_interval == 0 and i > 0:
probs = F.softmax(masked_output, dim=-1)
max_vals, max_indices = torch.max(probs, 1)
accuracy = torch.mean(max_indices.eq(masked_target).float()).item()
cur_loss = total_loss_ae.item() / args.log_interval
elapsed = time.time() - start_time
print('| epoch {:3d} | {:5d}/{:5d} batches | ms/batch {:5.2f} | '
'loss {:5.2f} | ppl {:8.2f} | acc {:8.2f}'
.format(epoch, i, len(train1_data),
elapsed * 1000 / args.log_interval,
cur_loss, math.exp(cur_loss), accuracy))
with open("{}/log_{}.txt".format(args.outf, suffix_name), 'a') as f:
f.write('| epoch {:3d} | {:5d}/{:5d} batches | ms/batch {:5.2f} | '
'loss {:5.2f} | ppl {:8.2f} | acc {:8.2f}\n'.
format(epoch, i, len(train1_data),
elapsed * 1000 / args.log_interval,
cur_loss, math.exp(cur_loss), accuracy))
total_loss_ae = 0
start_time = time.time()
return total_loss_ae, start_time
def train_gan_g():
gan_gen.train()
gan_gen.zero_grad()
noise = to_gpu(args.cuda,
Variable(torch.ones(args.batch_size, args.z_size)))
noise.data.normal_(0, 1)
fake_hidden = gan_gen(noise)
errG = gan_disc(fake_hidden)
errG.backward(one)
optimizer_gan_g.step()
return errG
def grad_hook(grad):
return grad * args.grad_lambda
''' Adapted from https://github.com/caogang/wgan-gp/blob/master/gan_cifar10.py '''
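# calc_gradient_penalty implements the WGAN-GP regularizer: sample random
# points on straight lines between real and fake codes, then penalize the
# squared deviation of the critic's gradient norm from 1 at those points,
# scaled by args.gan_gp_lambda.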
def calc_gradient_penalty(netD, real_data, fake_data):
bsz = real_data.size(0)
alpha = torch.rand(bsz, 1)
alpha = alpha.expand(bsz, real_data.size(1)) # only works for 2D XXX
alpha = alpha.cuda()
interpolates = alpha * real_data + ((1 - alpha) * fake_data)
interpolates = Variable(interpolates, requires_grad=True)
disc_interpolates = netD(interpolates)
gradients = torch.autograd.grad(outputs=disc_interpolates, inputs=interpolates,
grad_outputs=torch.ones(disc_interpolates.size()).cuda(),
create_graph=True, retain_graph=True, only_inputs=True)[0]
gradients = gradients.view(gradients.size(0), -1)
gradient_penalty = ((gradients.norm(2, dim=1) - 1) ** 2).mean() * args.gan_gp_lambda
return gradient_penalty
def train_gan_d(whichdecoder, batch):
gan_disc.train()
optimizer_gan_d.zero_grad()
# positive samples ----------------------------
# generate real codes
source, target, lengths = batch
source = to_gpu(args.cuda, Variable(source))
target = to_gpu(args.cuda, Variable(target))
# batch_size x nhidden
real_hidden = autoencoder(whichdecoder, source, lengths, noise=False, encode_only=True)
# loss / backprop
errD_real = gan_disc(real_hidden)
errD_real.backward(one)
# negative samples ----------------------------
# generate fake codes
noise = to_gpu(args.cuda,
Variable(torch.ones(args.batch_size, args.z_size)))
noise.data.normal_(0, 1)
# loss / backprop
fake_hidden = gan_gen(noise)
errD_fake = gan_disc(fake_hidden.detach())
errD_fake.backward(mone)
# gradient penalty
gradient_penalty = calc_gradient_penalty(gan_disc, real_hidden.data, fake_hidden.data)
gradient_penalty.backward()
optimizer_gan_d.step()
errD = -(errD_real - errD_fake)
return errD, errD_real, errD_fake
def train_gan_d_into_ae(whichdecoder, batch):
autoencoder.train()
optimizer_ae.zero_grad()
source, target, lengths = batch
source = to_gpu(args.cuda, Variable(source))
target = to_gpu(args.cuda, Variable(target))
real_hidden = autoencoder(whichdecoder, source, lengths, noise=False, encode_only=True)
real_hidden.register_hook(grad_hook)
errD_real = gan_disc(real_hidden)
errD_real.backward(mone)
torch.nn.utils.clip_grad_norm(autoencoder.parameters(), args.clip)
optimizer_ae.step()
return errD_real
if args.surrogate or args.surrogate_perturb:
surrogate = MLP_Classify(args.nhidden, 1, args.surrogate_layers, gpu=args.cuda)
victim_classifier = EmbeddingClassifier(args.victim_emsize, ntokens, 1)
if args.cuda:
surrogate = surrogate.cuda()
victim_classifier = victim_classifier.cuda()
# load the autoencoder
print('Loading models ...')
with open('{}/autoencoder_model{}.pt'.format(args.outf, args.load_suffix), 'rb') as f:
autoencoder.load_state_dict(torch.load(f))
with open('{}/gan_gen_model{}.pt'.format(args.outf, args.load_suffix), 'rb') as f:
gan_gen.load_state_dict(torch.load(f))
with open('{}/gan_disc_model{}.pt'.format(args.outf, args.load_suffix), 'rb') as f:
gan_disc.load_state_dict(torch.load(f))
# load the victim
with open('{}/victim/{}.pt'.format(args.outf, args.victim_load), 'rb') as f:
victim_classifier.load_state_dict(torch.load(f))
if args.surrogate:
# training the surrogate classifier
surrogate_optimizer = optim.Adam(surrogate.parameters(),
lr=args.lr_surrogate,
betas=(args.beta1, 0.999))
os.makedirs('{}/surrogate'.format(args.outf), exist_ok=True)
for epoch in range(1, args.epochs + 1):
niter = 0
tot_loss = 0.0
tot_sample_loss = 0.0
while niter < len(train1_data) and niter < len(train2_data):
loss1 = play_with_surrogate(surrogate, victim_classifier, surrogate_optimizer, train1_data[niter])
loss2 = play_with_surrogate(surrogate, victim_classifier, surrogate_optimizer, train2_data[niter])
loss = (loss1 + loss2) / 2
if args.surrogate_samples != 0:
fixed_noise = to_gpu(args.cuda,
Variable(torch.ones(args.surrogate_samples, args.z_size)))
fixed_noise.normal_(0, 1)
gan_gen.eval()
sample_hidden = gan_gen(fixed_noise)
max_indices = autoencoder.generate(1, sample_hidden, 50).cpu().numpy()
max_indices = [clean_tokens([corpus.dictionary.word2idx['<bos>']] \
+ list(sent_idx)) for sent_idx in max_indices]
lens = list(map(len, max_indices))
mx_len = max(lens)
for sent_idx in max_indices:
for i in range(len(sent_idx), mx_len):
sent_idx.append(corpus.dictionary.word2idx['<pad>'])
max_indices = torch.tensor(max_indices, dtype=torch.int64)
lens = torch.tensor(lens, dtype=torch.int64)
sample_loss = play_with_surrogate(surrogate, victim_classifier, surrogate_optimizer, (max_indices, None, lens), code=sample_hidden)
tot_sample_loss += sample_loss
tot_loss += loss
niter += 1
if niter % 100 == 0:
if args.surrogate_samples == 0:
print('* Iter %d, loss %.5f' % (niter, tot_loss / 100))
else:
print('* Iter %d, loss %.5f, sample loss %.5f' % (niter, tot_loss / 100, \
tot_sample_loss / 100))
tot_loss = 0.0
tot_sample_loss = 0.0
test_loss = evaluate_surrogate(surrogate, victim_classifier)
print('Epoch %d: %.5f' % (epoch, test_loss))
train1_data = batchify(corpus.data['train1'], args.batch_size, shuffle=True)
train2_data = batchify(corpus.data['train2'], args.batch_size, shuffle=True)
with open('{}/surrogate/surrogate{}.pt'.format(args.outf, epoch), 'wb') as fout:
torch.save(surrogate.state_dict(), fout)
else:
# perturb with trained surrogate classifier
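    # The attack below is FGSM-style in latent space: take the gradient of
    # the surrogate loss w.r.t. the irrelevant half of the code and step
    # args.budget in its sign direction, leaving the relevant half intact
    # (or use a random sign direction when args.random_perturb is set).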
def perturb(whichclass, source, target, length):
source = source.unsqueeze(0)
target = target.unsqueeze(0)
source = to_gpu(args.cuda, Variable(source))
autoencoder.eval()
surrogate.eval()
surrogate.zero_grad()
victim_classifier.eval()
gan_disc.eval()
gan_disc.zero_grad()
code = autoencoder(0, source, [length], noise=False, encode_only=True).detach()
irrelevant = code[:,:nhidden_irr]
relevant = code[:,nhidden_irr:]
if args.random_perturb:
perturbed_code = torch.cat((irrelevant + torch.sign(torch.randn_like(irrelevant)) * args.budget, relevant), 1)
else:
irrelevant.requires_grad = True
labels = to_gpu(args.cuda, Variable(torch.zeros(source.size(0)).fill_(whichclass-1)))
code = torch.cat((irrelevant, relevant), 1)
out = surrogate(code)
loss = F.binary_cross_entropy(out, labels)
if args.surrogate_disc_w is not None:
loss = loss + args.surrogate_disc_w * gan_disc(code)
loss.backward()
perturbed_code = torch.cat((irrelevant + torch.sign(irrelevant.grad) * args.budget, relevant), 1)
return autoencoder.generate(whichclass, perturbed_code, 50).squeeze(0)
with open('{}/surrogate/{}.pt'.format(args.outf, args.surrogate_load), 'rb') as f:
surrogate.load_state_dict(torch.load(f))
success = 0
tot_cnt = 0
for label, data in [(0, test1_data), (1, test2_data)]:
for batch in data:
source, target, length = batch
for isource, itarget, ilength in zip(source, target, length):
adversarial_idx = perturb(label, isource, itarget, ilength)
clean_original = clean_tokens(list(isource.cpu().numpy()))
clean_perturbed = clean_tokens([corpus.dictionary.word2idx['<bos>']] + list(adversarial_idx.cpu().numpy()))
chars_original = " ".join([corpus.dictionary.idx2word[x] for x in clean_original])
                chars = " ".join([corpus.dictionary.idx2word[x] for x in clean_perturbed])
#!/usr/bin/python
# -*- coding: utf-8
import unittest
import os
import tempfile
from TestBase import *
from gp.client import *
TestGraphName = 'test' + str(os.getpid())
TestFilePrefix = '/tmp/gptest-' + str(os.getpid())
class ServerTest (ClientTestBase, unittest.TestCase):
"""Test server functions via client lib."""
def test_createGraph(self):
"""Graph management functions"""
global TestGraphName
name = TestGraphName + "_2"
# create the graph
self.gp.create_graph(name)
#make sure we can't create it twice
ok = self.gp.try_create_graph(name)
self.assertFalse( ok, "should not be able to create graph again when it already exists" )
# see if we can use the graph from another connection
gp2 = self.newConnection()
gp2.use_graph(name)
# see if we can drop the graph while it's used
self.gp.drop_graph(name)
        #TODO: gp2 should now report errors, because the graph is gone. test that.
# see if we can re-create the graph after it was dropped
self.gp.create_graph(name)
self.gp.drop_graph(name)
#TODO: test name restrictions
def test_createNameRestrictions(self):
global TestGraphName
self.gp.strictArguments = False
# disable strict client-side validation
try:
n = ''
ok = self.gp.create_graph(n)
self.fail("empty graph names should be forbidden!" )
except gpException, ex:
pass
# ok
n = '1337'
ok = self.gp.try_create_graph(n)
self.assertFalse(ok, "numeric graph names should be forbidden! (name: `" + n + "`)" )
n = '1337' + TestGraphName
ok = self.gp.try_create_graph(n)
self.assertFalse( ok,
"graph names starting with a number should be forbidden! (name: `"
+ n + "`)" )
chars = " \r\n\t\0\x09^!\"§\$%&/()[]\ \ =?'#`\\*+~.:, ;<>|@"
for ch in chars:
try:
n = TestGraphName + ch + "test"
ok = self.gp.create_graph(n)
self.fail("graph names containing `"
+ ch + "` should be forbidden! (name: `"
+ n + "`)" )
except gpException, ex:
pass
# ok
try:
n = ch + TestGraphName
ok = self.gp.create_graph(n)
self.fail("graph names starting with `"
+ ch + "` should be forbidden! (name: `" + n + "`)")
except gpException, ex:
pass
# ok
n = 'test1337' + TestGraphName
ok = self.gp.try_create_graph(n)
self.assertEquals( 'OK', ok,
"graph names containing numbers should be allowd! (name: `"
+ n+ "`)")
self.gp.try_drop_graph(n)
chars = '-_8'
for ch in chars:
n = 'test' + ch + TestGraphName
ok = self.gp.try_create_graph(n)
self.assertEquals( 'OK', ok, "graph names containing `"
+ ch + "` should be allowd! (name: `" + n + "`)")
self.gp.try_drop_graph(n)
def test_dropGraph(self):
global TestGraphName
name = TestGraphName + "_2"
self.gp.create_graph(name)
self.gp.drop_graph(name)
ok = self.gp.try_use_graph(name)
self.assertFalse( ok,
"should not be able to use graph after dropping it" )
ok = self.gp.try_drop_graph(name)
self.assertEquals( 'NONE', ok, "should not be able to drop "
+ "graph again after it was already dropped." )
def test_listGraphs(self):
global TestGraphName
gp2 = self.newConnection()
graphs = gp2.capture_list_graphs()
graphs = extract_array_column(graphs, 0)
self.assertTrue( TestGraphName in graphs,
"test table TestGraphName should be in the list" )
self.gp.drop_graph(TestGraphName)
graphs = gp2.capture_list_graphs()
#print "graphs: " . var_export($graphs, true) . "\n"
graphs = extract_array_column( graphs, 0 )
#print "graphs: " . var_export($graphs, true) . "\n"
#print "containes: " . var_export(ConnectionTestBase::setContains( $graphs, TestGraphName ), true) . "\n"
self.assertFalse(
ConnectionTestBase.setContains(graphs, TestGraphName),
"test table TestGraphName should no longer be in the list" )
def test_shutdown(self):
global TestGraphName
gp2 = self.newConnection()
gp2.use_graph(TestGraphName)
gp2.stats()
self.assertSessionValue('ConnectedGraph', TestGraphName)
self.gp.shutdown() # <------------------
# self.assertSessionValue('ConnectedGraph', 'None');
# nice, but not reliable. race condition.
self.gp.try_stats()
self.assertEquals( 'FAILED', self.gp.getStatus(),
'fetching stats should fail after shutdown' )
gp2.try_stats()
self.assertEquals( 'FAILED', gp2.getStatus(),
'fetching stats should fail after shutdown' )
gp2.close()
gp3 = self.newConnection()
gp3.try_use_graph(TestGraphName)
self.assertEquals( 'FAILED', gp3.getStatus(),
'graph should be unavailable after shutdown' )
gp3.close()
def test_quit(self):
global TestGraphName
gp2 = self.newConnection()
gp2.use_graph(TestGraphName)
gp2.stats()
self.assertSessionValue('ConnectedGraph', TestGraphName)
self.gp.quit() # <------------------
self.assertStatus('OK')
try:
self.gp.try_stats()
self.fail( 'connection should be unusable after quit' )
except gpProtocolException, e:
pass
# ok
gp2.stats()
self.assertEquals( 'OK', gp2.getStatus(),
'connection should still be usable by others after quit; response: %s' % gp2.getResponse() )
gp2.close()
gp3 = self.newConnection()
gp3.use_graph(TestGraphName)
self.assertEquals( 'OK', gp3.getStatus(),
'graph should still be available to others after quit; response: %s' % gp2.getResponse() )
gp3.close()
# privileges
def test_createGraphPrivilege(self):
global TestGraphName
global TestAdmin, TestAdminPassword
global TestMaster, TestMasterPassword
name = TestGraphName + "_2"
gp = self.newConnection()
ok = gp.try_create_graph(name)
self.assertFalse( ok,
"should not be able to create a graph without authorizing" )
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
ok = gp.try_create_graph(name)
self.assertFalse( ok,
"should not be able to create a graph without admin privileges" )
gp.authorize('password',
TestAdmin + ":" + TestAdminPassword)
# re-authenticate
ok = gp.create_graph(name)
self.assertEquals( ok, 'OK',
"should be able to create graph with admin privileges; response: %s" % gp.getResponse() )
gp.try_drop_graph(name)
# cleanup
def test_dropGraphPrivilege(self):
global TestGraphName
global TestAdmin, TestAdminPassword
global TestMaster, TestMasterPassword
name = TestGraphName
gp = self.newConnection()
ok = gp.try_drop_graph(name)
self.assertFalse( ok, "should not be able to drop a graph without authorizing" )
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
ok = gp.try_drop_graph(name)
self.assertFalse( ok,
"should not be able to drop a graph without admin privileges" )
gp.authorize('password',
TestAdmin + ":" + TestAdminPassword)
# re-authenticate
ok = gp.drop_graph(name)
self.assertEquals( ok, 'OK',
"should be able to drop graph with admin privileges; response: %s" % gp.getResponse() )
def test_inputPipingPrivilege(self):
global TestGraphName, TestGraphServHost
global TestAdmin, TestAdminPassword
global TestMaster, TestMasterPassword
#XXX: this uses local files, so it will always fail
# if the server isn't on localhost!
if TestGraphServHost != 'localhost':
return None
f = os.path.dirname(os.path.abspath(__file__)) + '/gp.test.data'
gp = self.newConnection()
gp.use_graph(TestGraphName)
gp.allowPipes = True
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
try:
ok = gp.execute("add-arcs < " + f)
self.fail(
"should not be able to pipe without admin privileges!" )
except gpProcessorException, ex:
self.assertEquals( 'DENIED', gp.getStatus(),
"piping should be denied, not fail. Message: "
+ str(ex))
        gp.authorize('password', TestAdmin + ":" + TestAdminPassword)
# re-authenticate
ok = gp.execute("add-arcs < " + f)
self.assertEquals( ok, 'OK',
"should be able to pipe with admin privileges; response: %s" % gp.getResponse() )
def test_outputPipingPrivilege(self):
global TestGraphName, TestGraphServHost
global TestAdmin, TestAdminPassword
global TestMaster, TestMasterPassword
#XXX: this uses local files, so it will always fail
# if the server isn't on localhost!
if TestGraphServHost != 'localhost':
return None
f = tempfile.mktemp(suffix='gpt')
gp = self.newConnection()
gp.use_graph(TestGraphName)
gp.allowPipes = True
try:
ok = gp.execute("list-roots > " + f)
self.fail(
"should not be able to pipe without admin privileges!" )
except gpProcessorException, ex:
self.assertEquals( 'DENIED', gp.getStatus(),
"piping should be denied, not fail. Message: "
+ str(ex))
gp.authorize(
'password', TestAdmin + ":" + TestAdminPassword)
# re-authenticate
ok = gp.execute("list-roots > " + f)
self.assertEquals(
ok, 'OK', "should be able to pipe with admin privileges; response: %s" % gp.getResponse() )
try:
            os.unlink(f)
# cleanup
except:
pass
def test_addArcsPrivilege(self):
global TestGraphName
global TestMaster, TestMasterPassword
gp = self.newConnection()
gp.use_graph(TestGraphName)
ok = gp.try_add_arcs(((1, 11 ), (1, 12 ) ) )
self.assertFalse(
ok, "should not be able to add arcs without authorizing" )
self.assertEquals('DENIED', gp.getStatus(),
"command should be denied, not fail" )
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
ok = gp.try_add_arcs(((1, 11 ), (1, 12 ) ) )
self.assertEquals( 'OK', ok,
"should be able to add arcs with updater privileges; response: %s" % gp.getResponse() )
def test_removeArcsPrivilege(self):
global TestGraphName
global TestMaster, TestMasterPassword
self.gp.add_arcs(((1, 11 ), (1, 12 ) ) )
# add some arcs as admin
gp = self.newConnection()
gp.use_graph(TestGraphName)
ok = gp.try_remove_arcs(((1, 11 ), ) )
self.assertFalse( ok,
"should not be able to delete arcs without authorizing" )
self.assertEquals( 'DENIED', gp.getStatus(),
"command should be denied, not fail" )
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
ok = gp.try_remove_arcs(((1, 11 ), ) )
self.assertEquals( 'OK', ok,
"should be able to delete arcs with updater privileges; response: %s" % gp.getResponse() )
def test_replaceSuccessorsPrivilege(self):
global TestGraphName
global TestMaster, TestMasterPassword
self.gp.add_arcs(((1, 11 ), (1, 12 ) ) )
# add some arcs as admin
gp = self.newConnection()
gp.use_graph(TestGraphName)
ok = gp.try_replace_successors( 1, (17, ) )
self.assertFalse( ok,
"should not be able to replace arcs without authorizing" )
self.assertEquals( 'DENIED', gp.getStatus(),
"command should be denied, not fail" )
gp.authorize('password',
TestMaster + ":" + TestMasterPassword)
ok = gp.try_replace_successors( 1, (17, ) )
self.assertEquals( 'OK', ok,
"should be able to replace arcs with updater privileges; response: %s" % gp.getResponse() )
def test_replacePredecessorsPrivilege(self):
global TestGraphName
global TestMaster, TestMasterPassword
self.gp.add_arcs(((1, 11 ), (1, 12 ) ) )
        # add some arcs as admin
        # element with a list
if isinstance(estimates, np.ndarray) and len(estimates.shape)==1:
# XXX ??? so we are going away from inplace modifications?
estimates = list(estimates)
rangev = None
for i in xrange(len(estimates)):
v = estimates[i]
if np.isscalar(v):
if Nlabels == 1:
# ensure the right dimensionality
estimates[i] = np.array(v, ndmin=2)
elif Nlabels == 2:
def last_el(x):
"""Helper function. Returns x if x is scalar, and
last element if x is not (ie list/tuple)"""
if np.isscalar(x): return x
else: return x[-1]
if rangev is None:
# we need to figure out min/max estimates
# to invert for the 0th label
estimates_ = [last_el(x) for x in estimates]
rangev = np.min(estimates_) + np.max(estimates_)
estimates[i] = [rangev - v, v]
else:
raise ValueError, \
"Cannot have a single 'value' for multiclass" \
" classification. Got %s" % (v)
elif len(v) != Nlabels:
raise ValueError, \
"Got %d estimates whenever there is %d labels" % \
(len(v), Nlabels)
# reassign possibly adjusted estimates
sets_wv[iset] = (s[0], s[1], np.asarray(estimates))
# we need to estimate ROC per each label
# XXX order of labels might not correspond to the one among 'estimates'
# which were used to make a decision... check
rocs, aucs = [], [] # 1 per label
for i,label in enumerate(labels):
aucs_pl = []
ROCs_pl = []
for s in sets_wv:
targets_pl = (np.asanyarray(s[0]) == label).astype(int)
# XXX we might unify naming between AUC/ROC
ROC = auc_error
aucs_pl += [ROC([np.asanyarray(x)[i] for x in s[2]], targets_pl)]
ROCs_pl.append(ROC)
if len(aucs_pl)>0:
rocs += [ROCs_pl]
aucs += [nanmean(aucs_pl)]
#aucs += [np.mean(aucs_pl)]
# store results within the object
self._ROCs = rocs
self._aucs = aucs
self.__computed = True
@property
def aucs(self):
"""Compute and return set of AUC values 1 per label
"""
self._compute()
return self._aucs
@property
##REF: Name was automagically refactored
def rocs(self):
self._compute()
return self._ROCs
def plot(self, label_index=0):
"""
TODO: make it friendly to labels given by values?
should we also treat labels_map?
"""
externals.exists("pylab", raise_=True)
import pylab as pl
self._compute()
labels = self._labels
# select only rocs for the given label
rocs = self.rocs[label_index]
fig = pl.gcf()
ax = pl.gca()
pl.plot([0, 1], [0, 1], 'k:')
for ROC in rocs:
pl.plot(ROC.fp, ROC.tp, linewidth=1)
pl.axis((0.0, 1.0, 0.0, 1.0))
pl.axis('scaled')
pl.title('Label %s. Mean AUC=%.2f' % (label_index, self.aucs[label_index]))
pl.xlabel('False positive rate')
pl.ylabel('True positive rate')
class ConfusionMatrix(SummaryStatistics):
"""Class to contain information and display confusion matrix.
Implementation of the `SummaryStatistics` in the case of
classification problem. Actual computation of confusion matrix is
delayed until all data is acquired (to figure out complete set of
labels). If testing data doesn't have a complete set of labels,
but you like to include all labels, provide them as a parameter to
the constructor.
Confusion matrix provides a set of performance statistics (use
as_string(description=True) for the description of abbreviations),
as well ROC curve (http://en.wikipedia.org/wiki/ROC_curve)
plotting and analysis (AUC) in the limited set of problems:
binary, multiclass 1-vs-all.
"""
_STATS_DESCRIPTION = (
('TP', 'true positive (AKA hit)', None),
('TN', 'true negative (AKA correct rejection)', None),
('FP', 'false positive (AKA false alarm, Type I error)', None),
('FN', 'false negative (AKA miss, Type II error)', None),
('TPR', 'true positive rate (AKA hit rate, recall, sensitivity)',
'TPR = TP / P = TP / (TP + FN)'),
('FPR', 'false positive rate (AKA false alarm rate, fall-out)',
'FPR = FP / N = FP / (FP + TN)'),
('ACC', 'accuracy', 'ACC = (TP + TN) / (P + N)'),
('SPC', 'specificity', 'SPC = TN / (FP + TN) = 1 - FPR'),
('PPV', 'positive predictive value (AKA precision)',
'PPV = TP / (TP + FP)'),
('NPV', 'negative predictive value', 'NPV = TN / (TN + FN)'),
('FDR', 'false discovery rate', 'FDR = FP / (FP + TP)'),
('MCC', "Matthews Correlation Coefficient",
"MCC = (TP*TN - FP*FN)/sqrt(P N P' N')"),
('F1', 'F1 score',
"F1 = 2TP / (P + P') = 2TP / (2TP + FP + FN)"),
('AUC', "Area under (AUC) curve", None),
('CHI^2', "Chi-square of confusion matrix", None),
('LOE(ACC)', "Linear Order Effect in ACC across sets", None),
## ('Friedman(TPR)',
## "Friedman CHI^2 test of TPRs consistencies across sets", None),
## ('Friedman(CM)',
## "Friedman CHI^2 test of full CM (normalized) consistencies across sets", None),
) + SummaryStatistics._STATS_DESCRIPTION
def __init__(self, labels=None, labels_map=None, **kwargs):
"""Initialize ConfusionMatrix with optional list of `labels`
Parameters
----------
labels : list
Optional set of labels to include in the matrix
labels_map : None or dict
Dictionary from original dataset to show mapping into
numerical labels
targets
Optional set of targets
predictions
Optional set of predictions
"""
SummaryStatistics.__init__(self, **kwargs)
if labels == None:
labels = []
self.__labels = labels
"""List of known labels"""
self.__labels_in_custom_order = bool(len(labels))
"""So we know later on either we could resort them"""
self.__labels_map = labels_map
"""Mapping from original into given labels"""
self.__matrix = None
"""Resultant confusion matrix"""
def __call__(self, predictions, targets, estimates=None, store=False):
"""Computes confusion matrix (counts)
It would rely on previously provided 'labels' to define columns/rows
of the matrix to assure consistency across multiple invocations.
Parameters
----------
store : bool, optional
By default, this function does not modify an existing
instance of the ConfusionMatrix, and is just used merely to
provide a resultant confusion matrix. If 'store' set to
True, provided set of predictions and targets would be added
to the sets.
Returns
-------
numpy.ndarray
counts of hits with rows -- predictions, columns -- targets
"""
labels = self.__labels
if labels is None or not len(labels):
raise RuntimeError("ConfusionMatrix must have labels assigned prior"
"__call__()")
# verify that we know all the labels
labels_set = set(labels)
if not (labels_set.issuperset(targets)
and labels_set.issuperset(predictions)):
raise ValueError("Known labels %r does not include some labels "
"found in predictions %r or targets %r provided"
% (labels_set, set(predictions), set(targets)))
Nlabels = len(labels_set)
cm = np.zeros( (Nlabels, Nlabels), dtype=int )
rev_map = dict([ (x[1], x[0]) for x in enumerate(labels)])
for t,p in zip(targets, predictions):
cm[rev_map[p], rev_map[t]] += 1
if store:
self.add(targets=targets, predictions=predictions, estimates=estimates)
return cm
# XXX might want to remove since summaries does the same, just without
# supplying labels
@property
def matrices(self):
"""Return a list of separate confusion matrix per each stored set"""
return [ self.__class__(labels=self.labels,
labels_map=self.labels_map,
sets=[x]) for x in self.sets]
def _compute(self):
"""Actually compute the confusion matrix based on all the sets"""
super(ConfusionMatrix, self)._compute()
if __debug__:
if not self.__matrix is None:
debug("LAZY",
"Have to recompute %s#%s" \
% (self.__class__.__name__, id(self)))
# TODO: BinaryClassifier might spit out a list of predictions for each
# value need to handle it... for now just keep original labels
try:
# figure out what labels we have
labels = \
list(reduce(lambda x, y: x.union(set(y[0]).union(set(y[1]))),
self.sets,
set(self.__labels)))
except:
labels = self.__labels
# Check labels_map if it was provided if it covers all the labels
labels_map = self.__labels_map
if labels_map is not None:
labels_set = set(labels)
map_labels_set = set(labels_map.values())
if not map_labels_set.issuperset(labels_set):
warning("Provided labels_map %s is not coherent with labels "
"provided to ConfusionMatrix. No reverse mapping "
"will be provided" % labels_map)
labels_map = None
# Create reverse map
labels_map_rev = None
if labels_map is not None:
labels_map_rev = {}
for k,v in labels_map.iteritems():
v_mapping = labels_map_rev.get(v, [])
v_mapping.append(k)
labels_map_rev[v] = v_mapping
self.__labels_map_rev = labels_map_rev
labels.sort()
if self.__labels is None or not len(self.__labels):
self.__labels = labels # just store the recomputed labels
else:
# we should append them to already known ones
# Otherwise order of labels known before might be altered
add_labels = [x for x in labels if not (x in self.__labels)]
if len(add_labels):
self.__labels += add_labels
labels = self.__labels # and use them later on
if not self.__labels_in_custom_order:
labels.sort()
Nlabels, Nsets = len(labels), len(self.sets)
if __debug__:
debug("CM", "Got labels %s" % labels)
# Create a matrix for all votes
mat_all = np.zeros( (Nsets, Nlabels, Nlabels), dtype=int )
# create total number of samples of each label counts
        # just for convenience I guess since it can always be
# computed from mat_all
        counts_all = np.zeros(
name)
self.m_event_dispatcher.fire_event(_event)
def notify_namespace(self):
"""
Notify that a namespace update query should be done.
"""
_event = CEventNamespace()
self.m_event_dispatcher.fire_event(_event)
def get_state(self):
return self.m_state_manager.get_state()
def verify_broken(self):
if self.m_state_manager.get_state() != STATE_BROKEN:
raise DebuggerNotBroken
def get_current_filename(self, frame_index, fException):
"""
Return path of sources corresponding to the frame at depth
'frame_index' down the stack of the current thread.
"""
ctx = self.get_current_ctx()
try:
f = None
base_frame = ctx.frame_acquire()
(f, frame_lineno) = ctx.get_frame(base_frame, frame_index, fException)
frame_filename = calc_frame_path(f)
return frame_filename
finally:
f = None
base_frame = None
ctx.frame_release()
def get_threads(self):
return self.m_threads
def set_break_dont_lock(self):
self.m_f_first_to_break = True
self.m_state_manager.set_state(STATE_BROKEN, fLock = False)
self.set_break_flag()
self.set_all_tracers()
def request_break(self):
"""
Ask debugger to break (pause debuggee).
"""
if len(self.m_threads) == 0:
self.wait_for_first_thread()
try:
self.m_state_manager.acquire()
if self.m_state_manager.get_state() == STATE_BROKEN:
return
self.set_break_dont_lock()
finally:
self.m_state_manager.release()
self.send_events(None)
def request_go_quiet(self, fLock = True):
try:
self.request_go(fLock)
except DebuggerNotBroken:
pass
def request_go(self, fLock = True):
"""
Let debugger run.
"""
try:
if fLock:
self.m_state_manager.acquire()
self.verify_broken()
self.m_fUnhandledException = False
self.m_state_manager.set_state(STATE_RUNNING, fLock = False)
if self.m_fembedded:
time.sleep(0.33)
self.set_break_flag()
finally:
if fLock:
self.m_state_manager.release()
def request_go_breakpoint(self, filename, scope, lineno, frame_index, fException):
"""
Let debugger run until the temporary breakpoint defined by the arguments is hit.
"""
assert(is_unicode(filename))
assert(is_unicode(scope))
try:
self.m_state_manager.acquire()
self.verify_broken()
if filename in [None, '']:
_filename = self.get_current_filename(frame_index, fException)
elif not is_provider_filesystem(filename):
_filename = as_string(filename, sys.getfilesystemencoding())
else:
_filename = FindFile(filename, fModules = True)
self.m_bp_manager.set_temp_breakpoint(_filename, scope, lineno)
self.set_all_tracers()
self.request_go(fLock = False)
finally:
self.m_state_manager.release()
def request_step_quiet(self, fLock = True):
try:
self.request_step(fLock)
except DebuggerNotBroken:
pass
def request_step(self, fLock = True):
"""
Let debugger run until next statement is reached or a breakpoint
is hit in another thread.
"""
try:
if fLock:
self.m_state_manager.acquire()
self.verify_broken()
try:
ctx = self.get_current_ctx()
except NoThreads:
return
self.m_step_tid = ctx.m_thread_id
self.m_next_frame = None
self.m_return_frame = None
self.request_go(fLock = False)
finally:
if fLock:
self.m_state_manager.release()
def request_next(self):
"""
Let debugger run until next statement in the same frame
is reached or a breakpoint is hit in another thread.
"""
try:
self.m_state_manager.acquire()
self.verify_broken()
try:
ctx = self.get_current_ctx()
except NoThreads:
return
if self.m_lastest_event in ['return', 'exception']:
return self.request_step(fLock = False)
self.m_next_frame = ctx.m_frame
self.m_return_frame = None
self.request_go(fLock = False)
finally:
self.m_state_manager.release()
def request_return(self):
"""
Let debugger run until the end of the current frame is reached
or a breakpoint is hit in another thread.
"""
try:
self.m_state_manager.acquire()
self.verify_broken()
try:
ctx = self.get_current_ctx()
except NoThreads:
return
if self.m_lastest_event == 'return':
return self.request_step(fLock = False)
self.m_next_frame = None
self.m_return_frame = ctx.m_frame
self.request_go(fLock = False)
finally:
self.m_state_manager.release()
def request_jump(self, lineno):
"""
Jump to line number 'lineno'.
"""
try:
self.m_state_manager.acquire()
self.verify_broken()
try:
ctx = self.get_current_ctx()
except NoThreads:
return
frame = ctx.m_frame
code = frame.f_code
valid_lines = CalcValidLines(code)
sbi = CScopeBreakInfo(as_unicode(''), valid_lines)
l = sbi.CalcScopeLine(lineno)
frame.f_lineno = l
finally:
frame = None
self.m_state_manager.release()
self.send_events(None)
def set_thread(self, tid):
"""
Switch focus to specified thread.
"""
try:
self.m_state_manager.acquire()
self.verify_broken()
try:
if (tid >= 0) and (tid < 100):
_tid = list(self.m_threads.keys())[tid]
else:
_tid = tid
ctx = self.m_threads[_tid]
except (IndexError, KeyError):
raise ThreadNotFound
self.m_current_ctx = ctx
self.m_lastest_event = ctx.m_event
finally:
self.m_state_manager.release()
self.send_events(None)
class CDebuggerEngine(CDebuggerCore):
"""
Main class for the debugger.
Adds functionality on top of CDebuggerCore.
"""
def __init__(self, fembedded = False):
CDebuggerCore.__init__(self, fembedded)
event_type_dict = {
CEventState: {},
CEventStackDepth: {},
CEventBreakpoint: {},
CEventThreads: {},
CEventNoThreads: {},
CEventThreadBroken: {},
CEventNamespace: {},
CEventUnhandledException: {},
CEventStack: {},
CEventNull: {},
CEventExit: {},
CEventForkSwitch: {},
CEventExecSwitch: {},
CEventSynchronicity: {},
CEventTrap: {},
CEventForkMode: {},
CEventPsycoWarning: {},
CEventConflictingModules: {},
CEventSignalIntercepted: {},
CEventSignalException: {},
CEventClearSourceCache: {},
CEventEmbeddedSync: {}
}
self.m_event_queue = CEventQueue(self.m_event_dispatcher)
self.m_event_queue.register_event_types(event_type_dict)
event_type_dict = {CEventSync: {}}
self.m_event_dispatcher.register_callback(self.send_events, event_type_dict, fSingleUse = False)
def shutdown(self):
self.m_event_queue.shutdown()
CDebuggerCore.shutdown(self)
def sync_with_events(self, fException, fSendUnhandled):
"""
Send debugger state to client.
"""
if len(self.m_threads) == 0:
self.wait_for_first_thread()
index = self.m_event_queue.get_event_index()
event = CEventSync(fException, fSendUnhandled)
self.m_event_dispatcher.fire_event(event)
return index
def trap_conflicting_modules(self):
modules_list = []
for m in CONFLICTING_MODULES:
if m in g_found_conflicting_modules:
continue
if not m in sys.modules:
continue
if m == 'psyco':
#
# Old event kept for compatibility.
#
event = CEventPsycoWarning()
self.m_event_dispatcher.fire_event(event)
g_found_conflicting_modules.append(m)
modules_list.append(as_unicode(m))
if modules_list == []:
return False
event = CEventConflictingModules(modules_list)
self.m_event_dispatcher.fire_event(event)
return True
def wait_for_event(self, timeout, event_index):
"""
Wait for new events and return them as list of events.
"""
self.cancel_request_go_timer()
self.trap_conflicting_modules()
(new_event_index, sel) = self.m_event_queue.wait_for_event(timeout, event_index)
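# note: if new conflicting modules were imported while waiting, the fired
# event is not yet in `sel`, so the retry below waits once more to include it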
if self.trap_conflicting_modules():
(new_event_index, sel) = self.m_event_queue.wait_for_event(timeout, event_index)
return (new_event_index, sel)
def set_breakpoint(self, filename, scope, lineno, fEnabled, expr, frame_index, fException, encoding):
print_debug('Setting breakpoint to: %s, %s, %d' % (repr(filename), scope, lineno))
assert(is_unicode(filename))
assert(is_unicode(scope))
assert(is_unicode(expr))
fLock = False
try:
if filename in [None, '']:
self.m_state_manager.acquire()
fLock = True
self.verify_broken()
_filename = self.get_current_filename(frame_index, fException)
elif not is_provider_filesystem(filename):
_filename = as_string(filename, sys.getfilesystemencoding())
else:
_filename = FindFile(filename, fModules = True)
if expr != '':
try:
encoding = self.__calc_encoding(encoding, filename = _filename)
_expr = as_bytes(ENCODING_SOURCE % encoding + expr, encoding)
compile(_expr, '<string>', 'eval')
except:
raise SyntaxError
encoding = as_unicode(encoding)
bp = self.m_bp_manager.set_breakpoint(_filename, scope, lineno, fEnabled, expr, encoding)
self.set_all_tracers()
event = CEventBreakpoint(bp)
#print_debug(repr(vars(bp)))
self.m_event_dispatcher.fire_event(event)
finally:
if fLock:
self.m_state_manager.release()
def disable_breakpoint(self, id_list, fAll):
self.m_bp_manager.disable_breakpoint(id_list, fAll)
self.set_all_tracers()
event = CEventBreakpoint(None, CEventBreakpoint.DISABLE, id_list, fAll)
self.m_event_dispatcher.fire_event(event)
def enable_breakpoint(self, id_list, fAll):
self.m_bp_manager.enable_breakpoint(id_list, fAll)
self.set_all_tracers()
event = CEventBreakpoint(None, CEventBreakpoint.ENABLE, id_list, fAll)
self.m_event_dispatcher.fire_event(event)
def delete_breakpoint(self, id_list, fAll):
self.m_bp_manager.delete_breakpoint(id_list, fAll)
self.set_all_tracers()
event = CEventBreakpoint(None, CEventBreakpoint.REMOVE, id_list, fAll)
self.m_event_dispatcher.fire_event(event)
def get_breakpoints(self):
"""
Return an id -> breakpoint dictionary.
"""
bpl = self.m_bp_manager.get_breakpoints()
_items = [(id, breakpoint_copy(bp)) for (id, bp) in bpl.items()]
for (id, bp) in _items:
bp.m_code = None
_bpl = dict(_items)
return _bpl
def send_events(self, event):
"""
Send series of events that define the debugger state.
"""
if isinstance(event, CEventSync):
fException = event.m_fException
fSendUnhandled = event.m_fSendUnhandled
else:
fException = False
fSendUnhandled = False
try:
if isinstance(event, CEventSync) and not fException:
self.m_state_manager.set_state()
self.send_stack_depth()
self.send_threads_event(fException)
self.send_stack_event(fException)
self.send_namespace_event()
if fSendUnhandled and self.m_fUnhandledException:
self.send_unhandled_exception_event()
except NoThreads:
self.send_no_threads_event()
except:
print_debug_exception()
raise
def send_unhandled_exception_event(self):
event = CEventUnhandledException()
self.m_event_dispatcher.fire_event(event)
def send_stack_depth(self):
"""
Send event with stack depth and exception stack depth.
"""
f = None
tb = None
ctx = self.get_current_ctx()
try:
try:
f = ctx.frame_acquire()
except ThreadDone:
return
s = my_extract_stack(f)
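# count the remaining stack entries, hiding rpdb2's own import-wrapper
# frames unless the debugger itself is being debugged (g_fDebug)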
s = [1 for (a, b, c, d) in s if g_fDebug or c != 'rpdb2_import_wrapper']
stack_depth = len(s)
tb = get_traceback(f, ctx)
if tb is None:
stack_depth_exception = None
else:
s = my_extract_stack(tb.tb_frame.f_back)
s += my_extract_tb(tb)
s = [1 for (a, b, c, d) in s if g_fDebug or c != 'rpdb2_import_wrapper']
stack_depth_exception = len(s)
event = CEventStackDepth(stack_depth, stack_depth_exception)
self.m_event_dispatcher.fire_event(event)
finally:
f = None
tb = None
ctx.frame_release()
def send_threads_event(self, fException):
"""
Send event with current thread list.
In case of exception, send only the current thread.
"""
tl = self.get_thread_list()
if fException:
ctid = tl[0]
itl = tl[1]
_itl = [a for a in itl if a[DICT_KEY_TID] == ctid]
_tl = (ctid, _itl)
else:
_tl = tl
event = CEventThreads(*_tl)
self.m_event_dispatcher.fire_event(event)
def send_stack_event(self, fException):
sl = self.get_stack([], False, fException)
if len(sl) == 0:
return
event = CEventStack(sl[0])
self.m_event_dispatcher.fire_event(event)
def send_namespace_event(self):
"""
Send event notifying namespace should be queried again.
"""
event = CEventNamespace()
self.m_event_dispatcher.fire_event(event)
def send_no_threads_event(self):
_event = CEventNoThreads()
self.m_event_dispatcher.fire_event(_event)
def send_event_null(self):
"""
Make the event waiter return.
"""
event = CEventNull()
self.m_event_dispatcher.fire_event(event)
def __get_stack(self, ctx, ctid, fException):
tid = ctx.m_thread_id
f = None
_f = None
tb = None
_tb = None
try:
try:
f = ctx.frame_acquire()
except ThreadDone:
return None
if fException:
tb = get_traceback(f, ctx)
if tb is None:
raise NoExceptionFound
_tb = tb
while _tb.tb_next is not None:
_tb = _tb.tb_next
_f = _tb.tb_frame
s = my_extract_stack(tb.tb_frame.f_back)
s += my_extract_tb(tb)
else:
_f = f
s = my_extract_stack(f)
code_list = []
while | |
`keyvalue`. Only the first such occurrence is
removed."""
packet_coord = list(self[keyname]).index(keyvalue)
loopnames = self.GetLoopNames(keyname)
for dataname in loopnames:
self.block[dataname][0] = list(self.block[dataname][0])
del self.block[dataname][0][packet_coord]
self.block[dataname][1] = list(self.block[dataname][1])
del self.block[dataname][1][packet_coord]
def GetKeyedPacket(self,keyname,keyvalue,no_case=False):
"""Return the loop packet (a `StarPacket` object) where `keyname` has value
`keyvalue`. Ignore case in `keyvalue` if `no_case` is True. `ValueError`
is raised if no packet is found or more than one packet is found."""
my_loop = self.GetLoop(keyname)
#print("Looking for %s in %s" % (keyvalue, my_loop.parent_block))
#print('Packet check on:' + keyname)
#[print(repr(getattr(a,keyname))) for a in my_loop]
if no_case:
one_pack= [a for a in my_loop if getattr(a,keyname).lower()==keyvalue.lower()]
else:
one_pack= [a for a in my_loop if getattr(a,keyname)==keyvalue]
if len(one_pack)!=1:
raise ValueError("Bad packet key %s = %s: returned %d packets" % (keyname,keyvalue,len(one_pack)))
print("Keyed packet: %s" % one_pack[0])
return one_pack[0]
def GetCompoundKeyedPacket(self,keydict):
"""Return the loop packet (a `StarPacket` object) where the `{key:(value,caseless)}` pairs
in `keydict` take the appropriate values. Ignore case for a given `key` if `caseless` is
True. `ValueError` is raised if no packet is found or more than one packet is found."""
#print "Looking for %s in %s" % (keyvalue, self.parent_block[keyname])
keynames = list(keydict.keys())
my_loop = self.GetLoop(keynames[0])
for one_key in keynames:
keyval,no_case = keydict[one_key]
if no_case:
my_loop = list([a for a in my_loop if str(getattr(a,one_key)).lower()==str(keyval).lower()])
else:
my_loop = list([a for a in my_loop if getattr(a,one_key)==keyval])
if len(my_loop)!=1:
raise ValueError("Bad packet keys %s: returned %d packets" % (repr(keydict),len(my_loop)))
print("Compound keyed packet: %s" % my_loop[0])
return my_loop[0]
def GetKeyedSemanticPacket(self,keyvalue,cat_id):
"""Return a complete packet for category `cat_id` where the
category key for the category equals `keyvalue`. This routine
will understand any joined loops, so if separate loops in the
datafile belong to the
same category hierarchy (e.g. `_atom_site` and `_atom_site_aniso`),
the returned `StarPacket` object will contain datanames from
both categories."""
target_keys = self.dictionary.cat_key_table[cat_id]
target_keys = [k[0] for k in target_keys] # keep only the first key from each compound-key list
p = StarPacket()
# set case-sensitivity flag
lcase = False
if self.dictionary[target_keys[0]]['_type.contents'] in ['Code','Tag','Name']:
lcase = True
for cat_key in target_keys:
try:
extra_packet = self.GetKeyedPacket(cat_key,keyvalue,no_case=lcase)
except KeyError: #missing key
try:
test_key = self[cat_key] #generate key if possible
print('Test key is %s' % repr( test_key ))
if test_key is not None and\
not (isinstance(test_key,list) and (None in test_key or len(test_key)==0)):
print('Getting packet for key %s' % repr( keyvalue ))
extra_packet = self.GetKeyedPacket(cat_key,keyvalue,no_case=lcase)
except: #cannot be generated
continue
except ValueError: #none/more than one, assume none
continue
#extra_packet = self.dictionary.generate_default_packet(cat_id,cat_key,keyvalue)
p.merge_packet(extra_packet)
# the following attributes are used to calculate missing values
for keyname in target_keys:
if hasattr(p,keyname):
p.key = [keyname]
break
if not hasattr(p,"key"):
raise ValueError("No key found for %s, packet is %s" % (cat_id,str(p)))
p.cif_dictionary = self.dictionary
p.fulldata = self
return p
def GetMultiKeyedSemanticPacket(self,keydict,cat_id):
"""Return a complete packet for category `cat_id` where the keyvalues are
provided as a dictionary of key:(value,caseless) pairs.
This routine
will understand any joined loops, so if separate loops in the
datafile belong to the
same category hierarchy (e.g. `_atom_site` and `_atom_site_aniso`),
the returned `StarPacket` object will contain datanames from
the requested category and any children."""
#if len(keyvalues)==1: #simplification
# return self.GetKeyedSemanticPacket(keydict[1][0],cat_id)
target_keys = self.dictionary.cat_key_table[cat_id]
# update the dictionary passed to us with all equivalents, for
# simplicity.
parallel_keys = list(zip(*target_keys)) #transpose
print('Parallel keys:' + repr(parallel_keys))
print('Keydict:' + repr(keydict))
start_keys = list(keydict.keys())
for one_name in start_keys:
key_set = [a for a in parallel_keys if one_name in a]
for one_key in key_set:
keydict[one_key] = keydict[one_name]
# target_keys is a list of lists, each of which is a compound key
p = StarPacket()
# a little function to return the dataname for a key
def find_key(key):
for one_key in self.dictionary.key_equivs.get(key,[])+[key]:
if self.has_key(one_key):
return one_key
return None
for one_set in target_keys: #loop down the categories
true_keys = [find_key(k) for k in one_set]
true_keys = [k for k in true_keys if k is not None]
if len(true_keys)==len(one_set):
truekeydict = dict([(t,keydict[k]) for t,k in zip(true_keys,one_set)])
try:
extra_packet = self.GetCompoundKeyedPacket(truekeydict)
except KeyError: #one or more are missing
continue #should try harder?
except ValueError:
continue
else:
continue
print('Merging packet for keys ' + repr(one_set))
p.merge_packet(extra_packet)
# the following attributes are used to calculate missing values
p.key = true_keys
p.cif_dictionary = self.dictionary
p.fulldata = self
return p
def set_grammar(self,new_grammar):
self.string_delimiters = ["'",'"',"\n;",None]
if new_grammar in ['STAR2','2.0']:
self.string_delimiters += ['"""',"'''"]
if new_grammar == '2.0':
self.list_delimiter = " "
elif new_grammar == 'STAR2':
self.list_delimiter = ", "
elif new_grammar not in ['1.0','1.1']:
raise StarError('Request to set unknown grammar %s' % new_grammar)
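# For illustration (derived from the branches above):
# set_grammar('2.0') allows ', ", \n; and the triple-quote delimiters
# and separates list items with a space; 'STAR2' uses ", " between list
# items; '1.0'/'1.1' keep only the four basic delimiters.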
def SetOutputLength(self,wraplength=80,maxoutlength=2048):
"""Set the maximum output line length (`maxoutlength`) and the line length to
wrap at (`wraplength`). The wrap length is a target only and may not always be
possible."""
if wraplength > maxoutlength:
raise StarError("Wrap length (requested %d) must be <= Maximum line length (requested %d)" % (wraplength,maxoutlength))
self.wraplength = wraplength
self.maxoutlength = maxoutlength
def printsection(self,instring='',blockstart="",blockend="",indent=0,finish_at='',start_from=''):
self.provide_value = False
# first make an ordering
self.create_ordering(finish_at,start_from) #create self.output_order
# now do it...
if not instring:
outstring = CIFStringIO(target_width=80) # the returned string
else:
outstring = instring
# print block delimiter
outstring.write(blockstart,canbreak=True)
while len(self.output_order)>0:
#print "Remaining to output " + `self.output_order`
itemname = self.output_order.pop(0)
if not isinstance(itemname,int): #no loop
item_spec = [i for i in self.formatting_hints if i['dataname'].lower()==itemname.lower()]
if len(item_spec)>0:
item_spec = item_spec[0]
col_pos = item_spec.get('column',-1)
name_pos = item_spec.get('name_pos',-1)
else:
col_pos = -1
item_spec = {}
name_pos = -1
if col_pos < 0: col_pos = 40
outstring.set_tab(col_pos)
itemvalue = self[itemname]
outstring.write(self.true_case[itemname],mustbreak=True,do_tab=False,startcol=name_pos)
outstring.write(' ',canbreak=True,do_tab=False,delimiter=True) #space after itemname
self.format_value(itemvalue,outstring,hints=item_spec)
else:# we are asked to print a loop block
outstring.set_tab(10) #guess this is OK?
loop_spec = [i['name_pos'] for i in self.formatting_hints if i["dataname"]=='loop']
if loop_spec:
loop_indent = max(loop_spec[0],0)
else:
loop_indent = indent
outstring.write('loop_\n',mustbreak=True,do_tab=False,startcol=loop_indent)
self.format_names(outstring,indent+2,loop_no=itemname)
self.format_packets(outstring,indent+2,loop_no=itemname)
else:
returnstring = outstring.getvalue()
outstring.close()
return returnstring
def format_names(self,outstring,indent=0,loop_no=-1):
"""Print datanames from `loop_no` one per line"""
temp_order = self.loops[loop_no][:] #copy
format_hints = dict([(i['dataname'],i) for i in self.formatting_hints if i['dataname'] in temp_order])
while len(temp_order)>0:
itemname = temp_order.pop(0)
req_indent = format_hints.get(itemname,{}).get('name_pos',indent)
outstring.write(' ' * req_indent,do_tab=False)
outstring.write(self.true_case[itemname],do_tab=False)
outstring.write("\n",do_tab=False)
def format_packets(self,outstring,indent=0,loop_no=-1):
alldata = [self[a] for a in self.loops[loop_no]]
loopnames = self.loops[loop_no]
#print 'Alldata: %s' % `alldata`
packet_data = list(zip(*alldata))
#print 'Packet data: %s' % `packet_data`
#create a dictionary for quick lookup of formatting requirements
format_hints = dict([(i['dataname'],i) for i in self.formatting_hints if i['dataname'] in loopnames])
for position in range(len(packet_data)):
if position > 0:
outstring.write("\n") #new line each packet except first
for point in range(len(packet_data[position])):
datapoint = packet_data[position][point]
format_hint = format_hints.get(loopnames[point],{})
packstring = self.format_packet_item(datapoint,indent,outstring,format_hint)
outstring.write(' ',canbreak=True,do_tab=False,delimiter=True)
def format_packet_item(self,pack_item,indent,outstring,format_hint):
# print 'Formatting %s' % `pack_item`
# temporary check for any non-unicode items
if isinstance(pack_item,str) and not isinstance(pack_item,unicode):
raise StarError("Item {0!r} is not unicode".format(pack_item))
if isinstance(pack_item,unicode):
delimiter = format_hint.get('delimiter',None)
startcol = format_hint.get('column',-1)
outstring.write(self._formatstring(pack_item,delimiter=delimiter),startcol=startcol)
else:
self.format_value(pack_item,outstring,hints = format_hint)
def _formatstring(self,instring,delimiter=None,standard='CIF1',indent=0,hints={}):
if hints.get("reformat",False) and "\n" in instring:
instring = "\n"+self.do_wrapping(instring,hints["reformat_indent"])
allowed_delimiters = set(self.string_delimiters)
if len(instring)==0: allowed_delimiters.difference_update([None])
if len(instring) > (self.maxoutlength-2) or '\n' in instring:
allowed_delimiters.intersection_update(["\n;","'''",'"""'])
if any(k in instring for k in '[]{}\v\t ,'):
allowed_delimiters.difference_update([None])
if len(instring)>0 and instring[0] in '_$#;(':
allowed_delimiters.difference_update([None])
if len(instring)>3 and (instring[:4].lower()=='data' or instring[:4].lower()=='save'):
allowed_delimiters.difference_update([None])
if len(instring)>5 and instring[:6].lower()=='global':
allowed_delimiters.difference_update([None])
if '"' in instring: allowed_delimiters.difference_update(['"',None])
if "'" in instring: allowed_delimiters.difference_update(["'",None])
out_delimiter = "\n;" #default (most conservative)
if delimiter in allowed_delimiters:
out_delimiter = delimiter
elif "'" in allowed_delimiters: out_delimiter = "'"
elif '"' in allowed_delimiters: out_delimiter = '"'
if out_delimiter in ['"',"'",'"""',"'''"]: return out_delimiter + instring + out_delimiter
elif out_delimiter is None: return instring
# we are left with semicolon strings
# use our protocols:
maxlinelength = max([len(a) for a in instring.split('\n')])
if maxlinelength > self.maxoutlength:
protocol_string = apply_line_folding(instring)
else:
protocol_string = instring
# now check for embedded delimiters
if "\n;" in protocol_string:
prefix = "CIF:"
while prefix in protocol_string: prefix = prefix + ":"
protocol_string = apply_line_prefix(protocol_string,prefix+"> ")
return "\n;" + protocol_string + "\n;"
def format_value(self,itemvalue,stringsink,compound=False,hints={}):
"""Format a Star data value"""
global have_numpy
delimiter = hints.get('delimiter',None)
startcol = hints.get('column',-1)
if isinstance(itemvalue,str) and not isinstance(itemvalue,unicode): #not allowed
raise StarError("Non-unicode value {0} found in block".format(itemvalue))
if isinstance(itemvalue,unicode): #need to sanitize
stringsink.write(self._formatstring(itemvalue,delimiter=delimiter,hints=hints),canbreak = True,startcol=startcol)
elif isinstance(itemvalue,(list)) or (hasattr(itemvalue,'dtype') and hasattr(itemvalue,'__iter__')): #numpy
stringsink.set_tab(0)
stringsink.write('[',canbreak=True,newindent=True,mustbreak=compound,startcol=startcol)
if len(itemvalue)>0:
self.format_value(itemvalue[0],stringsink)
| |
import copy
import gc
import numpy
from numpy.linalg import LinAlgError
import joblib
import pandas
import psutil
import pygmo
from scipy.optimize import minimize
from scipy.optimize import differential_evolution
import time
from typing import Dict, List, Tuple
import warnings
from .constants import Constants
from .constants import Messages
from .datatypes import Measurement
from .datatypes import Sensitivity
from .generalized_islands import ArchipelagoHelpers
from .generalized_islands import LossCalculator
from .generalized_islands import ParallelEstimationInfo
from .model_checking import ModelChecker
from .oed import CovOptimality
from .parameter import Parameter
from .parameter import ParameterMapper
from .parameter import ParameterManager
from .simulation import ExtendedSimulator
from .utils import Calculations
from .utils import OwnDict
from .utils import Helpers
EPS64 = Constants.eps_float64
PRETTY_METRICS = Constants.pretty_metrics
SINGLE_ID = Constants.single_id
class Caretaker():
"""
Manages (takes care of) all major methods related to simulation, estimation,
and evaluation of dynamic bioprocess models and their observation functions.
Exposes several convenient methods of the individual classes in this module.
"""
def __init__(self,
bioprocess_model_class, model_parameters, states:list=None, initial_values:dict=None, replicate_ids:list=None,
initial_switches:list=None, model_name:str=None, observation_functions_parameters:List[tuple]=None,
model_checking_assistance:bool=True,
):
"""
Arguments
---------
bioprocess_model_class : Subclass of BioprocessModel
This class implements the bioprocess model.
model_parameters : list or dict
The model parameters, as specified in `bioprocess_model_class`.
Keyword arguments
-----------------
states : list
The model states, as specified in `bioprocess_model_class`.
Default is None, which enforces `initial_values` not to be None.
initial_values : dict
Initial values to the model, keys must match the states with a trailing '0'.
Default is None, which enforces `states` not to be None.
replicate_ids : list
Unique ids of replicates, for which the full model applies.
The parameters specified for the model and observation functions are considered global ones,
which may have different names and values for each replicate.
Default is None, which implies a single replicate model.
initial_switches : list
A list of booleans, indicating the initial state of switches.
Number of switches must correspond to the number of return events in method `state_events`,
if this method is implemented by the inheriting class.
Default is None, which enables auto-detection of initial switches, which all will be False.
model_name : str
A descriptive model name.
Default is None.
observation_functions_parameters : list of tuples
Each tuple stores a subclass of ObservationFunction
and a dictionary of its corresponding parametrization.
Default is None, which implies that there are no ObservationFunctions.
model_checking_assistance : bool
Runs a few sanity and call checks on the implemented model. Default is True.
"""
# store arguments for later use
self.__bioprocess_model_class = bioprocess_model_class
self.__model_parameters = model_parameters
self.__states = states
self.__initial_values = initial_values
self.__replicate_ids = replicate_ids
self.__initial_switches = initial_switches
self.__model_name = model_name
self.__observation_functions_parameters = observation_functions_parameters
self.replicate_ids = replicate_ids
if model_name is None:
self.model_name = bioprocess_model_class.__name__
else:
self.model_name = model_name
# Create an ExtendedSimulator instance for each replicate id
model_checker = ModelChecker()
self.simulators = {}
for _replicate_id in self.replicate_ids:
if _replicate_id is None:
_model_name = model_name
else:
_model_name = f'{model_name}_{_replicate_id}'
_simulator = ExtendedSimulator(
bioprocess_model_class,
model_parameters,
states,
initial_values,
initial_switches,
_model_name,
observation_functions_parameters,
_replicate_id,
)
if model_checking_assistance:
if not model_checker.check_model_consistency(copy.deepcopy(_simulator)):
warnings.warn(f'There might be some issues for {_model_name} with replicate_id {_replicate_id}')
self.simulators[_replicate_id] = _simulator
# Create a ParameterManager object
self._parameter_manager = ParameterManager(
self.replicate_ids,
self.simulators[self.replicate_ids[0]].get_all_parameters(),
)
self.optimizer_kwargs = None
#%% Properties
@property
def replicate_ids(self) -> list:
return self._replicate_ids
@replicate_ids.setter
def replicate_ids(self, value):
if value is None:
self._replicate_ids = [SINGLE_ID]
else:
if not Helpers.has_unique_ids(value):
raise ValueError(Messages.non_unique_ids)
self._replicate_ids = value
@property
def parameter_mapping(self):
return self._parameter_manager.parameter_mapping
@property
def optimizer_kwargs(self) -> dict:
return self._optimizer_kwargs
@optimizer_kwargs.setter
def optimizer_kwargs(self, value):
if value is not None and not isinstance(value, dict):
raise ValueError('Optimizer kwargs must be either `None` or a dictionary')
self._optimizer_kwargs = value
#%% Public methods
def add_replicate(self, replicate_id:str, mappings:List[ParameterMapper]=None):
"""
Adds another replicate to the multi model Caretaker object.
Arguments
---------
replicate_id : str
The new replicate_id to be added.
Keyword arguments
-----------------
mappings : list of ParameterMapper or tuple
A list of parameter mappings that should be applied to the new replicate_id.
Default is None, which implies that the local parameter names for the new replicate correspond to the global names.
Raises
------
AttributeError
In case the Caretaker object was created without explicit `replicate_ids` argument.
ValueError
The new replicate_id is not unique among the existing ones.
KeyError
Any of the `mappings` aims not for the new replicate_id.
"""
# store current parameter mapping
_parameter_mappers = self._parameter_manager.get_parameter_mappers()
_parameters = self._get_all_parameters()
if len(self.replicate_ids) == 1 and self.replicate_ids[0] is None:
raise AttributeError('Cannot add replicate_id to implicitly defined single replicate Caretaker object')
_updated_replicate_ids = copy.deepcopy(self.replicate_ids)
_updated_replicate_ids.append(replicate_id)
if not Helpers.has_unique_ids(_updated_replicate_ids):
raise ValueError(Messages.non_unique_ids)
if mappings is not None:
for _mapping in mappings:
if _mapping.replicate_id != replicate_id:
raise KeyError('The given mapping does not aim for the new replicate')
self.__init__(
bioprocess_model_class=self.__bioprocess_model_class,
model_parameters=self.__model_parameters,
states=self.__states,
initial_values=self.__initial_values,
replicate_ids=_updated_replicate_ids,
initial_switches=self.__initial_switches,
model_name=self.__model_name,
observation_functions_parameters=self.__observation_functions_parameters,
)
self.set_parameters(_parameters)
self.apply_mappings(_parameter_mappers)
if mappings is not None:
self.apply_mappings(mappings)
def simulate(self, t:numpy.ndarray, parameters:dict=None, verbosity:int=40, reset_afterwards:bool=False, suppress_stdout:bool=True) -> list:
"""
Runs a forward simulation for the fully specified model and its observation functions (if specified).
Arguments
---------
t : numpy.ndarray or float
The time points for integration. In case a single time point is provided,
the solver will treat this as the final integration time and choose the intermediate steps on its own.
Keyword arguments
-----------------
parameters : dict
In case a simulation for specific parameter values is wanted.
Default is None.
verbosity : int
Prints solver statistics (quiet = 50, whisper = 40, normal = 30, loud = 20, scream = 10).
Default is 40.
suppress_stdout : bool
No printouts of integrator warnings, which are directed to stdout by the assimulo package.
Set to False for model debugging purposes.
Default is True.
Returns
-------
simulations : list
The collection of simulations results as list of ModelState or Observation objects.
"""
if parameters is not None:
_original_parameters = self._get_all_parameters()
self.set_parameters(parameters)
simulations = []
for _id in self.simulators.keys():
_simulator = self.simulators[_id]
simulations.extend(_simulator.simulate(t=t, verbosity=verbosity, reset_afterwards=reset_afterwards, suppress_stdout=suppress_stdout))
if parameters is not None:
self.set_parameters(_original_parameters)
return simulations
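# A minimal usage sketch (hypothetical `MyModel` subclass of
# BioprocessModel and illustrative parameter names, not part of this
# module):
#
#     caretaker = Caretaker(
#         bioprocess_model_class=MyModel,
#         model_parameters=['mu_max', 'K_S'],
#         initial_values={'X0': 0.1, 'S0': 10.0},
#     )
#     simulations = caretaker.simulate(t=24.0, parameters={'mu_max': 0.4})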
def estimate(self,
unknowns:dict, measurements:List[Measurement], bounds:List[Tuple]=None, metric:str='negLL', use_global_optimizer:bool=None,
report_level:int=0, reset_afterwards:bool=False, handle_CVodeError:bool=True, optimizer_kwargs:dict=None,
) -> Tuple[dict, dict]:
"""
Estimates values for requested unknowns according to a specific metric, given some measurements.
Arguments
---------
unknowns : dict or list
The parameters to be estimated. Can be any of the model parameters, initial values or observation parameters.
Providing a list of valid unknowns causes the use of scipy's differential evolution global optimizer.
A dictionary with parameter:guess as key-value pairs is needed to use the local but faster minimizer.
measurements : List[Measurement]
The data from which the parameter estimation is performed.
Can provide a Measurement object for any model state or observation.
Keyword arguments
-----------------
bounds : list of tuples
Bounds for each unknown to be estimated.
Must be provided for use with differential evolution optimizer.
Default is None.
metric : str
The metric according to which the loss to be minimized is calculated.
Can be one of `negLL` (negative log-likelihood), `SS` (sum of squares), or `WSS` (weighted sum of squares).
Default is `negLL`, which implies that the corresponding Measurement objects are accordingly specified.
use_global_optimizer : bool
Enforce the use of differential evolution optimizer.
Default is None, which makes this decision based on the type of `unknowns` and `bounds`.
report_level : int
Enables informative output about the estimation process.
Default is 0, which is no output.
1 = prints estimated parameters and runtime of the estimation job.
2 = prints additionally the `OptimizeResult` result object, as returned by the optimizer.
3 = prints additionally handled CVodeErrors, which arise from toxic parameters.
This has an effect only in case `handle_CVodeError` is True.
4 = prints additionally the progress of the optimizer.
reset_afterwards : bool
To reset the Caretaker object after the estimation has finished.
Default is False.
handle_CVodeError : bool
Catches CVodeError raised by the solver, in order to not interrupt the estimations for toxic parameter values.
Default is True.
Returns
-------
estimations : dict
Key-value pairs of the unknowns and corresponding estimated values.
estimation_info : dict
Additional information about | |
np.array_equal(observed, np.fliplr(np.flipud(image)))
def test_randomness__two_flips(self):
# 50% horizontal flip, 50% vertical flip
aug = iaa.Sequential([
iaa.Fliplr(0.5),
iaa.Flipud(0.5)
])
frac_same = self._test_randomness__two_flips__compute_fraction_same(
aug, 200)
assert np.isclose(frac_same, 0.25, rtol=0, atol=0.1)
def test_randomness__two_flips__deterministic(self):
# 50% horizontal flip, 50% vertical flip
aug = iaa.Sequential([
iaa.Fliplr(0.5),
iaa.Flipud(0.5)
])
aug_det = aug.to_deterministic()
frac_same = self._test_randomness__two_flips__compute_fraction_same(
aug_det, 200)
assert (
np.isclose(frac_same, 0.0, rtol=0, atol=1e-5)
or np.isclose(frac_same, 1.0, rtol=0, atol=1e-5)
)
def _test_randomness__two_flips__compute_fraction_same(self, aug,
nb_iterations):
expected = [self.images, self.images_lr, self.images_ud,
self.images_lr_ud]
last_aug = None
nb_changed_aug = 0
for i in sm.xrange(nb_iterations):
observed_aug = aug.augment_images(self.images)
if i == 0:
last_aug = observed_aug
else:
if not np.array_equal(observed_aug, last_aug):
nb_changed_aug += 1
last_aug = observed_aug
assert np.any([np.array_equal(observed_aug, expected_i)
for expected_i in expected])
# should be the same in roughly 25% of all cases
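# (two independent 50/50 flips give four equally likely outcomes, so two
# consecutive augmentations agree with probability 4 * (1/4)^2 = 1/4)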
frac_changed = nb_changed_aug / nb_iterations
return 1 - frac_changed
def test_random_order_true_images(self):
aug = iaa.Sequential([
iaa.Affine(translate_px={"x": 1}, mode="constant", cval=0, order=0),
iaa.Fliplr(1.0)
], random_order=True)
frac_12 = self._test_random_order_images_frac_12(aug, 200)
assert np.isclose(frac_12, 0.5, rtol=0, atol=0.075)
def test_random_order_false_images(self):
aug = iaa.Sequential([
iaa.Affine(translate_px={"x": 1}, mode="constant", cval=0, order=0),
iaa.Fliplr(1.0)
], random_order=False)
frac_12 = self._test_random_order_images_frac_12(aug, 25)
assert frac_12 >= 1.0 - 1e-4
def test_random_order_true_deterministic_images(self):
aug = iaa.Sequential([
iaa.Affine(translate_px={"x": 1}, mode="constant", cval=0, order=0),
iaa.Fliplr(1.0)
], random_order=True)
aug = aug.to_deterministic()
frac_12 = self._test_random_order_images_frac_12(aug, 25)
assert (frac_12 >= 1.0-1e-4 or frac_12 <= 0.0+1e-4)
@classmethod
def _test_random_order_images_frac_12(cls, aug, nb_iterations):
image = np.uint8([[0, 1],
[2, 3]])
image_12 = np.uint8([[0, 0],
[2, 0]])
image_21 = np.uint8([[0, 1],
[0, 3]])
seen = [False, False]
for _ in sm.xrange(nb_iterations):
observed = aug.augment_images([image])[0]
if np.array_equal(observed, image_12):
seen[0] = True
elif np.array_equal(observed, image_21):
seen[1] = True
else:
assert False
frac_12 = seen[0] / np.sum(seen)
return frac_12
# TODO add random_order=False
def test_random_order_heatmaps(self):
aug = iaa.Sequential([
iaa.Affine(translate_px={"x": 1}),
iaa.Fliplr(1.0)
], random_order=True)
heatmaps_arr = np.float32([[0, 0, 1.0],
[0, 0, 1.0],
[0, 1.0, 1.0]])
heatmaps_arr_expected1 = np.float32([[0.0, 0.0, 0.0],
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0]])
heatmaps_arr_expected2 = np.float32([[0.0, 1.0, 0.0],
[0.0, 1.0, 0.0],
[0.0, 1.0, 1.0]])
seen = [False, False]
for _ in sm.xrange(100):
observed = aug.augment_heatmaps([
ia.HeatmapsOnImage(heatmaps_arr, shape=(3, 3, 3))])[0]
if np.allclose(observed.get_arr(), heatmaps_arr_expected1):
seen[0] = True
elif np.allclose(observed.get_arr(), heatmaps_arr_expected2):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
# TODO add random_order=False
def test_random_order_segmentation_maps(self):
aug = iaa.Sequential([
iaa.Affine(translate_px={"x": 1}),
iaa.Fliplr(1.0)
], random_order=True)
segmaps_arr = np.int32([[0, 0, 1],
[0, 0, 1],
[0, 1, 1]])
segmaps_arr_expected1 = np.int32([[0, 0, 0],
[0, 0, 0],
[1, 0, 0]])
segmaps_arr_expected2 = np.int32([[0, 1, 0],
[0, 1, 0],
[0, 1, 1]])
seen = [False, False]
for _ in sm.xrange(100):
observed = aug.augment_segmentation_maps([
SegmentationMapsOnImage(segmaps_arr, shape=(3, 3, 3))])[0]
if np.array_equal(observed.get_arr(), segmaps_arr_expected1):
seen[0] = True
elif np.array_equal(observed.get_arr(), segmaps_arr_expected2):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
# TODO add random_order=False
def test_random_order_keypoints(self):
KP = ia.Keypoint
kps = [KP(0, 0), KP(2, 0), KP(2, 2)]
kps_12 = [KP((0+1)*2, 0), KP((2+1)*2, 0), KP((2+1)*2, 2)]
kps_21 = [KP((0*2)+1, 0), KP((2*2)+1, 0), KP((2*2)+1, 2)]
kpsoi = ia.KeypointsOnImage(kps, shape=(3, 3))
kpsoi_12 = ia.KeypointsOnImage(kps_12, shape=(3, 3))
kpsoi_21 = ia.KeypointsOnImage(kps_21, shape=(3, 3))
def func1(keypoints_on_images, random_state, parents, hooks):
for kpsoi in keypoints_on_images:
for kp in kpsoi.keypoints:
kp.x += 1
return keypoints_on_images
def func2(keypoints_on_images, random_state, parents, hooks):
for kpsoi in keypoints_on_images:
for kp in kpsoi.keypoints:
kp.x *= 2
return keypoints_on_images
aug_1 = iaa.Lambda(func_keypoints=func1)
aug_2 = iaa.Lambda(func_keypoints=func2)
seq = iaa.Sequential([aug_1, aug_2], random_order=True)
seen = [False, False]
for _ in sm.xrange(100):
observed = seq.augment_keypoints(kpsoi)
if np.allclose(observed.to_xy_array(), kpsoi_12.to_xy_array()):
seen[0] = True
elif np.allclose(observed.to_xy_array(), kpsoi_21.to_xy_array()):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
# TODO add random_order=False
def test_random_order_polygons(self):
cba = ia.Polygon([(0, 0), (1, 0), (1, 1)])
cba_12 = ia.Polygon([(0, 0), (1, 0), ((1+1)*2, 1)])
cba_21 = ia.Polygon([(0, 0), (1, 0), ((1*2)+1, 1)])
cbaoi = ia.PolygonsOnImage([cba], shape=(3, 3))
def func1(polygons_on_images, random_state, parents, hooks):
for cbaoi_ in polygons_on_images:
for cba_ in cbaoi_.items:
cba_.exterior[-1, 0] += 1
return polygons_on_images
def func2(polygons_on_images, random_state, parents, hooks):
for cbaoi_ in polygons_on_images:
for cba_ in cbaoi_.items:
cba_.exterior[-1, 0] *= 2
return polygons_on_images
aug_1 = iaa.Lambda(func_polygons=func1)
aug_2 = iaa.Lambda(func_polygons=func2)
seq = iaa.Sequential([aug_1, aug_2], random_order=True)
seen = [False, False]
for _ in sm.xrange(100):
observed = seq.augment_polygons(cbaoi)
if np.allclose(observed.items[0].coords, cba_12.coords):
seen[0] = True
elif np.allclose(observed.items[0].coords, cba_21.coords):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
# TODO add random_order=False
def test_random_order_line_strings(self):
cba = ia.LineString([(0, 0), (1, 0), (1, 1)])
cba_12 = ia.LineString([(0, 0), (1, 0), ((1+1)*2, 1)])
cba_21 = ia.LineString([(0, 0), (1, 0), ((1*2)+1, 1)])
cbaoi = ia.LineStringsOnImage([cba], shape=(3, 3))
def func1(line_strings_on_images, random_state, parents, hooks):
for cbaoi_ in line_strings_on_images:
for cba_ in cbaoi_.items:
cba_.coords[-1, 0] += 1
return line_strings_on_images
def func2(line_strings_on_images, random_state, parents, hooks):
for cbaoi_ in line_strings_on_images:
for cba_ in cbaoi_.items:
cba_.coords[-1, 0] *= 2
return line_strings_on_images
aug_1 = iaa.Lambda(func_line_strings=func1)
aug_2 = iaa.Lambda(func_line_strings=func2)
seq = iaa.Sequential([aug_1, aug_2], random_order=True)
seen = [False, False]
for _ in sm.xrange(100):
observed = seq.augment_line_strings(cbaoi)
if np.allclose(observed.items[0].coords, cba_12.coords):
seen[0] = True
elif np.allclose(observed.items[0].coords, cba_21.coords):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
# TODO add random_order=False
def test_random_order_bounding_boxes(self):
bbs = [ia.BoundingBox(x1=1, y1=2, x2=30, y2=40)]
bbs_12 = [ia.BoundingBox(x1=(1+1)*2, y1=2, x2=30, y2=40)]
bbs_21 = [ia.BoundingBox(x1=(1*2)+1, y1=2, x2=30, y2=40)]
bbsoi = ia.BoundingBoxesOnImage(bbs, shape=(3, 3))
bbsoi_12 = ia.BoundingBoxesOnImage(bbs_12, shape=(3, 3))
bbsoi_21 = ia.BoundingBoxesOnImage(bbs_21, shape=(3, 3))
def func1(bounding_boxes_on_images, random_state, parents, hooks):
for bbsoi in bounding_boxes_on_images:
for bb in bbsoi.bounding_boxes:
bb.x1 += 1
return bounding_boxes_on_images
def func2(bounding_boxes_on_images, random_state, parents, hooks):
for bbsoi in bounding_boxes_on_images:
for bb in bbsoi.bounding_boxes:
bb.x1 *= 2
return bounding_boxes_on_images
aug_1 = iaa.Lambda(func_bounding_boxes=func1)
aug_2 = iaa.Lambda(func_bounding_boxes=func2)
seq = iaa.Sequential([aug_1, aug_2], random_order=True)
seen = [False, False]
for _ in sm.xrange(100):
observed = seq.augment_bounding_boxes(bbsoi)
if np.allclose(observed.to_xyxy_array(),
bbsoi_12.to_xyxy_array()):
seen[0] = True
elif np.allclose(observed.to_xyxy_array(),
bbsoi_21.to_xyxy_array()):
seen[1] = True
else:
assert False
if np.all(seen):
break
assert np.all(seen)
def test_zero_sized_axes(self):
shapes = [
(0, 0),
(0, 1),
(1, 0),
(0, 1, 0),
(1, 0, 0),
(0, 1, 1),
(1, 0, 1)
]
for shape in shapes:
for random_order in [False, True]:
with self.subTest(shape=shape):
image = np.zeros(shape, dtype=np.uint8)
aug = iaa.Sequential([iaa.Identity()],
random_order=random_order)
image_aug = aug(image=image)
assert image_aug.dtype.name == "uint8"
assert image_aug.shape == shape
def test_unusual_channel_numbers(self):
shapes = [
(1, 1, 4),
(1, 1, 5),
(1, 1, 512),
(1, 1, 513)
]
for shape in shapes:
for random_order in [False, True]:
with self.subTest(shape=shape):
image = np.zeros(shape, dtype=np.uint8)
aug = iaa.Sequential([iaa.Identity()],
random_order=random_order)
image_aug = aug(image=image)
assert np.all(image_aug == 0)
assert image_aug.dtype.name == "uint8"
assert image_aug.shape == shape
def test_add_to_empty_sequential(self):
aug = iaa.Sequential()
aug.add(iaa.Fliplr(1.0))
image = np.arange(4*4).reshape((4, 4)).astype(np.uint8)
observed = aug.augment_image(image)
assert np.array_equal(observed, np.fliplr(image))
def test_add_to_sequential_with_child(self):
aug = iaa.Sequential(iaa.Fliplr(1.0))
aug.add(iaa.Flipud(1.0))
image = np.arange(4*4).reshape((4, 4)).astype(np.uint8)
observed = aug.augment_image(image)
assert np.array_equal(observed, np.fliplr(np.flipud(image)))
def test_get_parameters(self):
aug1 = iaa.Sequential(iaa.Fliplr(1.0), random_order=False)
aug2 = iaa.Sequential(iaa.Fliplr(1.0), random_order=True)
assert aug1.get_parameters() == [False]
assert aug2.get_parameters() == [True]
def test_get_children_lists(self):
flip = iaa.Fliplr(1.0)
aug = iaa.Sequential(flip)
assert aug.get_children_lists() == [aug]
def test_to_deterministic(self):
child = iaa.Identity()
aug = iaa.Sequential([child])
aug_det = aug.to_deterministic()
assert aug_det.random_state is not aug.random_state
assert aug_det.deterministic
assert aug_det[0].deterministic
def test___str___and___repr__(self):
flip = iaa.Fliplr(1.0)
aug = iaa.Sequential(flip, random_order=True)
expected = (
"Sequential("
"name=%s, random_order=%s, children=[%s], deterministic=%s"
")" % (aug.name, "True", str(flip), "False")
)
assert aug.__str__() == aug.__repr__() == expected
def test_other_dtypes_noop__bool(self):
for random_order in [False, True]:
aug = iaa.Sequential([
iaa.Identity(),
iaa.Identity()
], random_order=random_order)
image = np.zeros((3, 3), dtype=bool)
image[0, 0] = True
image_aug = aug.augment_image(image)
assert image_aug.dtype.name == "bool"
assert np.all(image_aug == image)
def test_other_dtypes__noop__uint_int(self):
dtypes = ["uint8", "uint16", "uint32", "uint64",
"int8", "int32", "int64"]
for dtype, random_order in itertools.product(dtypes, [False, True]):
with self.subTest(dtype=dtype, random_order=random_order):
aug = iaa.Sequential([
iaa.Identity(),
iaa.Identity()
], random_order=random_order)
min_value, center_value, max_value = \
iadt.get_value_range_of_dtype(dtype)
value = max_value
image = np.zeros((3, 3), dtype=dtype)
image[0, 0] = value
image_aug = aug.augment_image(image)
assert image_aug.dtype.name == dtype
assert np.array_equal(image_aug, image)
def test_other_dtypes_noop__float(self):
dtypes = ["float16", "float32", "float64", "float128"]
values = [5000, 1000 ** 2, 1000 ** 3, 1000 ** 4]
for random_order in [False, True]:
for dtype, value in zip(dtypes, values):
with self.subTest(dtype=dtype, random_order=random_order):
aug = iaa.Sequential([
iaa.Identity(),
iaa.Identity()
], random_order=random_order)
image = np.zeros((3, 3), dtype=dtype)
image[0, 0] = value
image_aug = aug.augment_image(image)
assert image_aug.dtype.name == dtype
assert np.all(image_aug == image)
def test_other_dtypes_flips__bool(self):
for random_order in [False, True]:
# note that we use 100% probabilities with square | |
# main.py
import os
from typing import Optional, Tuple, List, Type
import numpy as np
from cv2 import cvtColor, COLOR_BGR2GRAY, imshow, waitKey, imread, ORB_create, ORB, KeyPoint, drawMarker, MARKER_CROSS, \
COLOR_GRAY2BGR, BFMatcher, NORM_HAMMING, DMatch, drawMatches, \
Rodrigues, resize, INTER_AREA, solvePnPRansac, projectPoints, \
goodFeaturesToTrack, calcOpticalFlowPyrLK, TERM_CRITERIA_EPS, TERM_CRITERIA_COUNT
from recordclass import RecordClass
from collections import namedtuple
from abc import ABC, abstractmethod
from enum import Enum
import glob
radiansToDegrees: float = 1 / np.pi * 180.0
EulerAngles = namedtuple('EulerAngles', ['yaw_rad', 'pitch_rad', 'roll_rad'])
def get_intrinsic_matrix(focal_length_x_pixel: float,
focal_length_y_pixel: float,
skew: float,
principal_point_x_pixel: float,
principal_point_y_pixel: float) -> np.ndarray:
"""
This function constructs an intrinsic calibration matrix, given explicit parameters
"""
return np.array([[focal_length_x_pixel, skew, principal_point_x_pixel],
[.0, focal_length_y_pixel, principal_point_y_pixel],
[.0, .0, 1.0]])
def get_euler_angles_from_rotation_matrix_ZYX_order(rotation_matrix: np.ndarray) -> EulerAngles:
"""
Retrieve euler angles from rotation matrix (direction cosine matrix)
assuming Z-Y-X order of rotation
reference: D.H.Titterton, Strapdown Inertial Navigation, (3.66)
@param rotation_matrix, 3X3
@return: out[0] - heading euler angle [rad]
out[1] - pitch euler angle [rad]
out[2] - roll euler angle [rad]
"""
roll_rad = np.arctan2(rotation_matrix[2][1], rotation_matrix[2][2])
pitch_rad = np.arcsin(-rotation_matrix[2][0])
yaw_rad = np.arctan2(rotation_matrix[1][0], rotation_matrix[0][0])
return EulerAngles(yaw_rad=yaw_rad,
pitch_rad=pitch_rad,
roll_rad=roll_rad)
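# e.g. the identity rotation matrix maps to EulerAngles(0.0, 0.0, 0.0)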
class ProcessedFrameData(RecordClass):
"""
This object aggregates frame + extracted features (keypoints and descriptors) into a single data object
"""
frame: np.ndarray
list_of_keypoints: List[KeyPoint]
descriptors: np.ndarray
@classmethod
def build(cls,
frame: np.ndarray,
list_of_keypoints: List[KeyPoint],
descriptors: np.ndarray):
return ProcessedFrameData(frame,
list_of_keypoints,
descriptors)
class RobotPose(RecordClass):
"""
This objects contains the robot pose (horizontal translation + yaw angle)
"""
position_x_meter: float
position_y_meter: float
yaw_angle_deg: float
@classmethod
def build(cls,
position_x_meter: float,
position_y_meter: float,
yaw_angle_deg: float):
return RobotPose(position_x_meter,
position_y_meter,
yaw_angle_deg)
class FeaturesHandlerAbstractBase(ABC):
"""
This is an abstract base class for a features extractor
"""
features_extractor: object
features_matcher: object
@abstractmethod
def extract_features(self, frame: np.ndarray) -> ProcessedFrameData:
"""
This method extracts features from a frame
Returns
-------
"""
pass
@abstractmethod
def match_features(self, frame_1: ProcessedFrameData, frame_2: ProcessedFrameData):
"""
This method matches features between frames
"""
@abstractmethod
def is_handler_capable(self, frame: np.ndarray) -> bool:
"""
This method tries to measure this handler's capability to track features for an unknown scene type
(i.e illumination, texture, etc)
"""
class OrbFeaturesHandler(FeaturesHandlerAbstractBase):
"""
This class implements an ORB features extractor
"""
features_extractor: ORB
features_matcher: BFMatcher
DEFAULT_DISTANCE_THRESHOLD_FOR_SUCCESSFULL_FEATURE_MATCH: int = 10
DEFAULT_NUMBER_OF_FEATURES = 1000
number_of_features: int
def __init__(self, number_of_features: int = None):
if number_of_features is None:
self.number_of_features = self.DEFAULT_NUMBER_OF_FEATURES
else:
self.number_of_features = number_of_features
self.features_extractor = ORB_create(nfeatures=self.number_of_features)
# ORB uses binary descriptors -> use hamming norm (XOR between descriptors)
self.features_matcher = BFMatcher(NORM_HAMMING, crossCheck=True)
def extract_features(self, frame: np.ndarray) -> ProcessedFrameData:
"""
This method extracts ORB features
"""
list_of_keypoints, descriptors = self.features_extractor.detectAndCompute(image=frame,
mask=None)
return ProcessedFrameData.build(frame=frame,
list_of_keypoints=list_of_keypoints,
descriptors=descriptors)
def match_features(self, frame_1: ProcessedFrameData, frame_2: ProcessedFrameData):
"""
This method matches ORB features
based on https://docs.opencv.org/master/dc/dc3/tutorial_py_matcher.html
"""
list_of_matches: List[DMatch] = self.features_matcher.match(queryDescriptors=frame_1.descriptors,
trainDescriptors=frame_2.descriptors)
return sorted(list_of_matches, key=lambda x: x.distance)
def is_handler_capable(self, frame: np.ndarray) -> bool:
"""
This method implements ORB handler capability test
"""
extracted_features: ProcessedFrameData = self.extract_features(frame=frame)
return len(extracted_features.list_of_keypoints) >= int(0.9 * self.DEFAULT_NUMBER_OF_FEATURES)
class ShiTomasiFeaturesHandler(FeaturesHandlerAbstractBase):
"""
This class implements a Shi-Tomasi corner-detector features extractor
"""
features_extractor: object = None
features_matcher: object = None
DEFAULT_DISTANCE_THRESHOLD_FOR_SUCCESSFULL_FEATURE_MATCH: int = 10
DEFAULT_nfeatures = 1000
def extract_features(self, frame: np.ndarray) -> ProcessedFrameData:
"""
This method extracts Shi-Tomasi corner features
"""
res = goodFeaturesToTrack(image=frame,
maxCorners=self.DEFAULT_nfeatures,
qualityLevel=0.01,
minDistance=10)
list_of_keypoints: List[KeyPoint] = [KeyPoint(point[0], point[1], -1) for point in list(res.squeeze())]
return ProcessedFrameData.build(frame=frame,
list_of_keypoints=list_of_keypoints,
descriptors=res)
def is_handler_capable(self, frame: np.ndarray) -> bool:
"""
This method implements Shi-Tomasi handler capability test
"""
extracted_features: ProcessedFrameData = self.extract_features(frame=frame)
return len(extracted_features.list_of_keypoints) >= int(0.9 * self.DEFAULT_nfeatures)
def match_features(self, frame_1: ProcessedFrameData, frame_2: ProcessedFrameData):
"""
This method uses sparse optical-flow (KLT algorithm)
https://docs.opencv.org/2.4/modules/video/doc/motion_analysis_and_object_tracking.html
"""
lk_params = dict(winSize=(5, 5),
maxLevel=2,
criteria=(TERM_CRITERIA_EPS | TERM_CRITERIA_COUNT, 10, 0.03))
prev_points: np.ndarray = np.array([(point.pt[0], point.pt[1]) for point in frame_1.list_of_keypoints]).astype(
dtype=np.float32)
new_points, st, err = calcOpticalFlowPyrLK(frame_1.frame,
frame_2.frame,
prev_points,
None,
**lk_params)
list_of_matches: List[DMatch] = list()
for idx, (status, match_error) in enumerate(zip(st.squeeze(), err.squeeze())):
if status == 1:
# KLT tracks each keypoint individually, so query and train indices
# coincide; the flow error doubles as the match distance
list_of_matches.append(DMatch(idx, idx, float(match_error)))
return list_of_matches
class MotionEstimationStatus(Enum):
"""
This enum indicates the motion estimation algorithm status
"""
MOTION_WAITING_FOR_FIRST_FRAME = 0
MOTION_NOT_UPDATED = 1
MOTION_OK = 2
MOTION_ALGORITHM_INCAPABLE = 3
class RobotHorizontalMotionEstimator:
"""
This class tracks the horizontal motion of a robot, navigating above a ground-plane,
using an incoming video stream from a down-facing camera
"""
robot_height_in_meter: float
camera_intrinsic: np.ndarray
features_handler: Type[FeaturesHandlerAbstractBase]
previous_frame_data: Optional[ProcessedFrameData]
current_frame_number: int
current_robot_pose: Optional[RobotPose]
motion_estimation_status: MotionEstimationStatus
debug_flag: bool
MARKER_TYPE_FOR_DEBUG: int = MARKER_CROSS
MARKER_SIZE_FOR_DEBUG: int = 5
RESIZE_RATIO_FOR_DEBUG: int = 2
REPROJECTION_TOLERANCE: float = 1e-10
robot_max_speed_m_sec: float
camera_fps: int
def __init__(self, robot_height_in_meter: float,
focal_length_x_pixel: float,
focal_length_y_pixel: float,
skew: float,
principal_point_x_pixel: float,
principal_point_y_pixel: float,
debug_flag: bool = False,
robot_max_speed_m_sec: float = 1.0,
camera_fps: int = 15):
self.robot_height_in_meter = robot_height_in_meter
self.robot_max_speed_m_sec = robot_max_speed_m_sec
self.camera_fps = camera_fps
self.camera_intrinsic = get_intrinsic_matrix(focal_length_x_pixel=focal_length_x_pixel,
focal_length_y_pixel=focal_length_y_pixel,
skew=skew,
principal_point_x_pixel=principal_point_x_pixel,
principal_point_y_pixel=principal_point_y_pixel)
self.current_frame_number = 0
self.init_motion_estimation()
self.init_filter_matching_parameters()
self.debug_flag = debug_flag
def init_motion_estimation(self):
"""
This method initializes the motion estimation block
"""
self.previous_frame_data = None
self.motion_estimation_status = MotionEstimationStatus.MOTION_WAITING_FOR_FIRST_FRAME
self.current_robot_pose = None
def init_features_handler(self, prepared_frame: np.ndarray):
"""
This method attempts to choose a features extractor for an unknown environment
"""
# try ORB features handler
self.features_handler = OrbFeaturesHandler()
if self.features_handler.is_handler_capable(frame=prepared_frame):
# ORB is capable of handling current scene
print("Selecting ORB features based motion tracker with {0:d} features".format(self.features_handler.number_of_features))
return
# try an ORB features handler with a smaller number of features (to handle sparse-texture scenes)
self.features_handler = OrbFeaturesHandler(number_of_features=500)
if self.features_handler.is_handler_capable(frame=prepared_frame):
# ORB, with reduced number of features, is capable of handling current scene
print("Selecting ORB features based motion tracker with {0:d} features".format(self.features_handler.number_of_features))
return
# try Shi-Tomasi
self.features_handler = ShiTomasiFeaturesHandler()
if self.features_handler.is_handler_capable(frame=prepared_frame):
# Shi-Tomasi is capable of handling current scene
print("Selecting Shi-Tomasi features based motion tracker")
return
# could not find suitable features handler
self.motion_estimation_status = MotionEstimationStatus.MOTION_ALGORITHM_INCAPABLE
def init_filter_matching_parameters(self):
"""
We assume that the geometric distance between feature keypoints in frames is constrained by robot max dynamics
D/H_robot = du/f_x -> max(du)=max(D)*f_x/H_robot)
D/H_robot = dv/f_y -> max(dv)=max(D)*f_y/H_robot)
max_distance_between_frames_in_pixels = max(max(du), max(dv))
"""
H_robot: float = self.robot_height_in_meter
max_D: float = self.robot_max_speed_m_sec / self.camera_fps
f_x: float = self.camera_intrinsic[0][0]
f_y: float = self.camera_intrinsic[1][1]
max_du = max_D * f_x / H_robot
max_dv = max_D * f_y / H_robot
self.max_distance_between_frames_in_pixels: float = max(max_du, max_dv)
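    # Worked example (illustrative numbers, not taken from any real setup):
    # with f_x = f_y = 600 px, H_robot = 2.0 m, robot_max_speed_m_sec = 1.0
    # and camera_fps = 15, max(D) = 1.0 / 15 ~= 0.067 m per frame, so
    # max_du = max_dv = 0.067 * 600 / 2.0 ~= 20 px between consecutive frames.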
def set_new_frame(self, raw_frame: np.ndarray):
"""
This method passes a new frame to the motion estimation class
Parameters
----------
raw_frame
Returns
-------
"""
self.current_frame_number += 1
prepared_frame: np.ndarray = self.prepare_frame(frame=raw_frame)
if self.motion_estimation_status in (MotionEstimationStatus.MOTION_WAITING_FOR_FIRST_FRAME,
MotionEstimationStatus.MOTION_ALGORITHM_INCAPABLE):
self.init_features_handler(prepared_frame)
try:
    self.update_motion_using_new_frame(frame=prepared_frame)
except Exception:
    # any failure in the pipeline leaves the previous estimate untouched
    self.motion_estimation_status = MotionEstimationStatus.MOTION_NOT_UPDATED
return
def prepare_frame(self, frame: np.ndarray) -> Optional[np.ndarray]:
"""
This method prepares the received frame for subsequent processing
Returns
-------
"""
return cvtColor(frame, COLOR_BGR2GRAY)
def process_frame(self, frame: np.ndarray) -> ProcessedFrameData:
"""
This method extracts features from a frame and returns an aggregated ProcessedFrameData object
"""
return self.features_handler.extract_features(frame=frame)
def update_motion_using_new_frame(self, frame: np.ndarray):
"""
This method
Parameters
----------
frame
Returns
-------
"""
processed_frame: ProcessedFrameData = self.process_frame(frame=frame)
if self.debug_flag:
# draw features keypoints
temp_frame: np.ndarray = cvtColor(processed_frame.frame, COLOR_GRAY2BGR)
for keypoint in processed_frame.list_of_keypoints:
drawMarker(img=temp_frame,
position=(int(keypoint.pt[0]), int(keypoint.pt[1])),
color=(0, 255, 0),
markerSize=self.MARKER_SIZE_FOR_DEBUG,
markerType=self.MARKER_TYPE_FOR_DEBUG)
imshow(winname="frame #{0:d} with detected features".format(self.current_frame_number),
mat=self.resize_image(temp_frame))
if self.previous_frame_data is None:
# first frame, cannot estimate motion
print("Robot motion estimation initialized")
self.motion_estimation_status = MotionEstimationStatus.MOTION_NOT_UPDATED
else:
self.update_motion(new_processed_frame_data=processed_frame)
self.previous_frame_data = processed_frame
def resize_image(self, image: np.ndarray) -> np.ndarray:
"""
This method resizes an image, for visualization
"""
scale_percent = int(100 / self.RESIZE_RATIO_FOR_DEBUG) # percent of original size
width = int(image.shape[1] * scale_percent / 100)
height = int(image.shape[0] * scale_percent / 100)
dim = (width, height)
# resize image
return resize(image, dim, interpolation=INTER_AREA)
def update_motion(self, new_processed_frame_data: ProcessedFrameData):
"""
This method estimates the horizontal translation of a robot, navigating above a ground-plane,
given two down-facing frames captured by the robot's camera.
The robot's altitude above the ground-plane is assumed to be known and constant
"""
# match features
list_of_matches: List[DMatch] = self.features_handler.match_features(frame_1=self.previous_frame_data,
frame_2=new_processed_frame_data)
list_of_matches_to_keep: List[DMatch] = self.filter_matches(list_of_matches=list_of_matches,
new_processed_frame_data=new_processed_frame_data)
if self.debug_flag:
imshow("feature matches frame #{0:d}->#{1:d}".format(self.current_frame_number - 1,
self.current_frame_number),
mat=self.resize_image(drawMatches(self.previous_frame_data.frame,
self.previous_frame_data.list_of_keypoints,
new_processed_frame_data.frame,
new_processed_frame_data.list_of_keypoints,
list_of_matches_to_keep,
None,
flags=2)))
self.get_camera_motion_using_PnP(list_of_matches_to_keep,
new_processed_frame_data=new_processed_frame_data)
def print_current_position(self):
"""
This method prints the current position of the robot, if available
"""
if self.motion_estimation_status == MotionEstimationStatus.MOTION_OK:
print("frame #{0:d} | Robot motion: X = {1:f} [m] Y = {2:f} [m] | rotation: yaw = {3:f} [deg]".format(
self.current_frame_number,
| |
#-----------------------------------------------------------------------------
# Copyright (c) 2005-2018, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
"""
Classes facilitating communication between PyInstaller and import hooks.
PyInstaller passes instances of classes defined by this module to corresponding
functions defined by external import hooks, which commonly modify the contents
of these instances before returning. PyInstaller then detects and converts these
modifications into appropriate operations on the current `PyiModuleGraph`
instance, thus modifying which modules will be frozen into the executable.
"""
from .datastruct import TOC
from ..lib.modulegraph.modulegraph import RuntimeModule, RuntimePackage
from .utils import format_binaries_and_datas
class PreSafeImportModuleAPI(object):
"""
Metadata communicating changes made by the current **pre-safe import module
hook** (i.e., hook run immediately _before_ a call to
`ModuleGraph._safe_import_module()` recursively adding the hooked module,
package, or C extension and all transitive imports thereof to the module
graph) back to PyInstaller.
Pre-safe import module hooks _must_ define a `pre_safe_import_module()`
function accepting an instance of this class, whose attributes describe the
subsequent `ModuleGraph._safe_import_module()` call creating the hooked
module's graph node.
Each pre-safe import module hook is run _only_ on the first attempt to
create the hooked module's graph node and then subsequently ignored. If this
hook successfully creates that graph node, the subsequent
`ModuleGraph._safe_import_module()` call will observe this fact and silently
return without attempting to recreate that graph node.
Pre-safe import module hooks are typically used to create graph nodes for
**runtime modules** (i.e., modules dynamically defined at runtime). Most
modules are physically defined in external `.py`-suffixed scripts. Some
modules, however, are dynamically defined at runtime (e.g., `six.moves`,
dynamically defined by the physically defined `six.py` module). However,
`ModuleGraph` only parses `import` statements residing in external scripts.
`ModuleGraph` is _not_ a full-fledged, Turing-complete Python interpreter
and hence has no means of parsing `import` statements performed by runtime
modules existing only in-memory.
'With great power comes great responsibility.'
Attributes (Immutable)
----------------------------
The following attributes are **immutable** (i.e., read-only). For
safety, any attempts to change these attributes _will_ result in a
raised exception:
module_graph : PyiModuleGraph
Current module graph.
module_basename : str
Unqualified name of the module to be imported (e.g., `text`).
module_name : str
Fully-qualified name of this module (e.g., `email.mime.text`).
parent_package : Package
Graph node for the package providing this module _or_ `None` if this
module is a top-level module.
"""
def __init__(self, module_graph, module_basename, module_name,
parent_package):
self._module_graph = module_graph
self._module_basename = module_basename
self._module_name = module_name
self._parent_package = parent_package
# Immutable properties. No corresponding setters are defined.
@property
def module_graph(self):
"Current module graph"
return self._module_graph
#FIXME: Remove this property and publicize the "_module_name" attribute to
#"module_name". This attribute is intended to be modifiable by hooks.
@property
def module_name(self):
"Fully-qualified name of this module (e.g., `email.mime.text`)."
return self._module_name
#FIXME: Remove this property and publicize the "_module_basename" attribute
#to "module_basename". This attribute is intended to be modifiable by hooks.
@property
def module_basename(self):
"Unqualified name of the module to be imported (e.g., `text`)."
return self._module_basename
@property
def parent_package(self):
"Parent Package of this node"
return self._parent_package
def add_runtime_module(self, module_name):
"""
Add a graph node representing a non-package Python module with the
passed name dynamically defined at runtime.
Most modules are statically defined on-disk as standard Python files.
Some modules, however, are dynamically defined in-memory at runtime
(e.g., `gi.repository.Gst`, dynamically defined by the statically
defined `gi.repository.__init__` module).
This method adds a graph node representing such a runtime module. Since
this module is _not_ a package, all attempts to import submodules from
this module in `from`-style import statements (e.g., the `queue`
submodule in `from six.moves import queue`) will be silently ignored. To
circumvent this, simply call `add_runtime_package()` instead.
Parameters
----------
module_name : str
Fully-qualified name of this module (e.g., `gi.repository.Gst`).
Examples
----------
This method is typically called by `pre_safe_import_module()` hooks:
e.g.,
def pre_safe_import_module(api):
api.add_runtime_module(api.module_name)
"""
self._module_graph.add_module(RuntimeModule(module_name))
def add_runtime_package(self, package_name):
"""
Add a graph node representing a non-namespace Python package with the
passed name dynamically defined at runtime.
Most packages are statically defined on-disk as standard subdirectories
containing `__init__.py` files. Some packages, however, are dynamically
defined in-memory at runtime (e.g., `six.moves`, dynamically defined by
the statically defined `six` module).
This method adds a graph node representing such a runtime package. All
attributes imported from this package in `from`-style import statements
that are submodules of this package (e.g., the `queue` submodule in
`from six.moves import queue`) will be imported rather than ignored.
Parameters
----------
package_name : str
Fully-qualified name of this package (e.g., `six.moves`).
Examples
----------
This method is typically called by `pre_safe_import_module()` hooks:
e.g.,
def pre_safe_import_module(api):
api.add_runtime_package(api.module_name)
"""
self._module_graph.add_module(RuntimePackage(package_name))
def add_alias_module(self, real_module_name, alias_module_name):
"""
Alias the source module to the target module with the passed names.
This method ensures that the next call to findNode() given the target
module name will resolve this alias. This includes importing and adding
a graph node for the source module if needed as well as adding a
reference from the target to the source module.
Parameters
----------
real_module_name : str
Fully-qualified name of the **existing module** (i.e., the
module being aliased).
alias_module_name : str
Fully-qualified name of the **non-existent module** (i.e.,
the alias to be created).
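Examples
----------
This method is typically called by `pre_safe_import_module()` hooks
(the module names here are hypothetical, for illustration only):
e.g.,
    def pre_safe_import_module(api):
        # route imports of the non-existent "mypkg_compat" to the real "mypkg"
        api.add_alias_module('mypkg', 'mypkg_compat')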
"""
self._module_graph.alias_module(real_module_name, alias_module_name)
def append_package_path(self, directory):
"""
Modulegraph does a good job of simulating Python's import mechanism, but
it cannot handle the `__path__` modifications that packages make at
runtime. This method therefore provides a mechanism to register extra
search paths for a package, and they will be honored.
Parameters
----------
directory : str
Absolute or relative path of the directory to be appended to this
package's `__path__` attribute.
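Examples
----------
This method is typically called by `pre_safe_import_module()` hooks
(the directory here is hypothetical, for illustration only):
e.g.,
    def pre_safe_import_module(api):
        api.append_package_path('/opt/myapp/extra_plugins')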
"""
self._module_graph.append_package_path(self._module_name, directory)
class PreFindModulePathAPI(object):
"""
Metadata communicating changes made by the current **pre-find module
path hook** (i.e., hook run immediately _before_ a call to
`ModuleGraph._find_module_path()` finding the hooked module's absolute
path) back to PyInstaller.
Pre-find module path hooks _must_ define a `pre_find_module_path()`
function accepting an instance of this class, whose attributes describe the
subsequent `ModuleGraph._find_module_path()` call to be performed.
Pre-find module path hooks are typically used to change the absolute
path from which a module will be subsequently imported and thus frozen into
the executable. To do so, hooks may overwrite the default `search_dirs` list
of the absolute paths of all directories to be searched for that module:
e.g.,
def pre_find_module_path(api):
api.search_dirs = ['/the/one/true/package/providing/this/module']
Each pre-find module path hook is run _only_ on the first call to
`ModuleGraph._find_module_path()` for the corresponding module.
Attributes
----------
The following attributes are **mutable** (i.e., modifiable). All changes to
these attributes will be immediately respected by PyInstaller:
search_dirs : list
List of the absolute paths of all directories to be searched for this
module (in order). Searching will halt at the first directory containing
this module.
Attributes (Immutable)
----------
The following attributes are **immutable** (i.e., read-only). For safety,
any attempts to change these attributes _will_ result in a raised exception:
module_name : str
Fully-qualified name of this module.
module_graph : PyiModuleGraph
Current module graph. For efficiency, this attribute is technically
mutable. To preserve graph integrity, this attribute should nonetheless
_never_ be modified. While read-only `PyiModuleGraph` methods (e.g.,
`findNode()`) are safely callable from within pre-find module path
hooks, methods modifying the graph are _not_. If graph modifications are
required, consider an alternative type of hook (e.g., pre-import module
hooks).
"""
def __init__(
self,
module_graph,
module_name,
search_dirs,
):
# Mutable attributes.
self.search_dirs = search_dirs
# Immutable attributes.
self._module_graph = module_graph
self._module_name = module_name
# Immutable properties. No corresponding setters are defined.
@property
def module_graph(self):
"""
Current module graph
"""
return self._module_graph
@property
def module_name(self):
"""
Fully-qualified name of this module.
"""
return self._module_name
class PostGraphAPI(object):
"""
Metadata communicating changes made by the current **post-graph hook**
(i.e., hook run for a specific module transitively imported by the current
application _after_ the module graph of all `import` statements performed by
this application has been constructed) back to PyInstaller.
Post-graph hooks may optionally define a `post_graph()` function accepting
an instance of this class, whose attributes describe the current state of
the module graph and the hooked module's graph node.
"""
Soft Blue": 0xACE1F0,
"Atoll": 0x2B797A,
"Atoll Sand": 0xFFCF9E,
"Atom Blue": 0x8F9CAC,
"Atomic": 0x3D4B52,
"Atomic Lime": 0xB9FF03,
"Atomic Pink": 0xFB7EFD,
"Atomic Tangerine": 0xFF9966,
"Atrium White": 0xF1EEE4,
"Attar of Rose": 0x994240,
"Attica": 0xA1BCA9,
"Attitude": 0xA48884,
"Attitude Gray": 0x7C7D75,
"Attorney": 0x3F4258,
"Au Chico": 0x9E6759,
"Au Gratin": 0xFF9D45,
"Au Natural": 0xE5E1CE,
"Au Naturel": 0xE8CAC0,
"Auberge": 0x3F3130,
"Aubergine": 0x372528,
"Aubergine Flesh": 0xF2E4DD,
"Aubergine Green": 0x8B762C,
"Aubergine Grey": 0x6E5861,
"Aubergine Mauve": 0x3B2741,
"Aubergine Perl": 0x5500AA,
"Auburn": 0x712F2C,
"Auburn Glaze": 0xB58271,
"Auburn Lights": 0x78342F,
"Auburn Wave": 0xD8A394,
"Audition": 0xB5ACB7,
"Audrey's Blush": 0xAE8087,
"Auger Shell": 0x9F9292,
"August Moon": 0xE6E1D6,
"August Morning": 0xFFD79D,
"Aumbry": 0x7C7469,
"Aunt Violet": 0x7C0087,
"Aura": 0xB2A8A1,
"Aura Orange": 0xB4262A,
"Aura White": 0xDEE2E4,
"Aureolin": 0xFDEE00,
"Auric": 0xC48919,
"Auric Armour Gold": 0xE8BC6D,
"Auricula Purple": 0x533552,
"AuroMetalSaurus": 0x6E7F80,
"Aurora": 0xEDDD59,
"Aurora Brown": 0x6A4238,
"Aurora Green": 0x6ADC99,
"Aurora Grey": 0xD3C5C4,
"Aurora Magenta": 0x963B60,
"Aurora Orange": 0xEC7042,
"Aurora Pink": 0xE881A6,
"Aurora Red": 0xB93A32,
"Aurora Splendor": 0x595682,
"Austere": 0x726848,
"Austere Gray": 0xBEBFB2,
"Australian Jade": 0x84A194,
"Australian Mint": 0xEFF8AA,
"Australien": 0xCC9911,
"Austrian Ice": 0xDEE6E7,
"<NAME>": 0x6B5446,
"Auth<NAME>an": 0xEADDC6,
"Autonomous": 0xC6C7C5,
"Autumn Air": 0xD2A888,
"Autumn Apple Yellow": 0xCDA449,
"Autumn Arrival": 0xF9986F,
"Autumn Ashes": 0x816B68,
"Autumn Avenue": 0xE3AD59,
"Autumn Bark": 0x9D6F46,
"Autumn Blaze": 0xD9922E,
"Autumn Blonde": 0xEED0AE,
"Autumn Bloom": 0xFFE0CB,
"Autumn Blush": 0xE4D1C0,
"Autumn Child": 0xFBE6C1,
"Autumn Crocodile": 0x447744,
"Autumn Fall": 0x67423B,
"Autumn Fern": 0x507B49,
"Autumn Fest": 0xBE7D33,
"Autumn Festival": 0xA28B36,
"Autumn Glaze": 0xB3573F,
"Autumn Glory": 0xFF8812,
"Autumn Glow": 0xE5C382,
"Autumn Gold": 0x7D623C,
"Autumn Gourd": 0xE6AE76,
"Autumn Grey": 0xB2ABA7,
"Autumn Haze": 0xD4C2B1,
"Autumn Hills": 0x784F50,
"Autumn Laurel": 0x9D8D66,
"Autumn Leaf": 0xB56A4C,
"Autumn Leaf Brown": 0x7A560E,
"Autumn Leaf Orange": 0xD07A04,
"Autumn Leaf Red": 0x623836,
"Autumn Leaves": 0x6E4440,
"Autumn Malt": 0xCEA48E,
"Autumn Maple": 0xC46215,
"Autumn Meadow": 0xACB78E,
"Autumn Mist": 0xF7B486,
"Autumn Night": 0x3B5861,
"Autumn Orange": 0xEE9950,
"Autumn Orchid": 0x9D9093,
"Autumn Pine Green": 0x158078,
"Autumn Red": 0x99451F,
"Autumn Ridge": 0x9B423F,
"Autumn Robin": 0xC2452D,
"Autumn Russet": 0xA4746E,
"Autumn Sage": 0xAEA26E,
"Autumn Sunset": 0xF38554,
"Autumn Umber": 0xAE704F,
"Autumn White": 0xFAE2CF,
"Autumn Wind": 0xFBD1B6,
"Autumn Wisteria": 0xC9A0DC,
"Autumn Yellow": 0xE99700,
"Autumn's Hill": 0xBA7A61,
"Autumnal": 0xA15325,
"Avagddu Green": 0x106B21,
"Avalon": 0x799B96,
"Avant-Garde Pink": 0xFF77EE,
"Aventurine": 0x576E6A,
"Avenue Tan": 0xD2C2B0,
"Averland Sunset": 0xFFAA1D,
"Aviary Blue": 0xC6E3E8,
"Avid Apricot": 0xF4C69F,
"Aviva": 0xC5B47F,
"Avocado": 0x568203,
"Avocado Cream": 0xB7BF6B,
"Avocado Dark Green": 0x3E4826,
"Avocado Green": 0x87A922,
"Avocado Pear": 0x555337,
"Avocado Peel": 0x39373B,
"Avocado Toast": 0x90B134,
"Avocado Whip": 0xCDD6B1,
"Awaken": 0xA7A3BB,
"Awakened": 0xE3DAE9,
"Awakening": 0xBB9E9B,
"Award Blue": 0x315886,
"Award Night": 0x54617D,
"Award Winning White": 0xFEF0DE,
"Awareness": 0xE3EBB1,
"Awesome Aura": 0xCCC1DA,
"Awesome Violet": 0xA7B2D4,
"Awkward Purple": 0xD208CC,
"Awning Red": 0x90413E,
"Axe Handle": 0x6B4730,
"Axinite": 0x756050,
"Axis": 0xBAB6CB,
"Axolotl": 0xFFF0DF,
"Ayahuasca Vine": 0x665500,
"Ayame Iris": 0x763568,
"Ayrshire": 0xA07254,
"Azalea": 0xD42E5B,
"Azalea Flower": 0xEFC0CB,
"Azalea Leaf": 0x4A6871,
"Azalea Pink": 0xF9C0C4,
"Azeitona": 0xA5B546,
"Azores Blue": 0x0085A7,
"Azraq Blue": 0x4C6CB3,
"Azshara Vein": 0xB13916,
"Aztec": 0x293432,
"Aztec Aura": 0xFFEFBC,
"Aztec Brick": 0x9E8352,
"Aztec Glimmer": 0xE7B347,
"Aztec Gold": 0xC39953,
"Aztec Jade": 0x33BB88,
"Aztec Sky": 0x4DB5D7,
"Aztec Temple": 0x84705B,
"Aztec Turquoise": 0x00D6E2,
"Aztec Warrior": 0xBB0066,
"Azuki Bean": 0x96514D,
"Azuki Red": 0x672422,
"Azul": 0x1D5DEC,
"Azul Caribe": 0x0089C4,
"Azul Cielito Lindo": 0xC9E3EB,
"Azul Pavo Real": 0x537FAF,
"Azul Petróleo": 0x36454F,
"Azul Primavera": 0xE2EFF2,
"Azul Tequila": 0xC0CFC7,
"Azul Turquesa": 0x6ABAC4,
"Azure": 0x007FFF,
"Azure Blue": 0x4D91C6,
"Azure Dragon": 0x053976,
"Azure Green Blue": 0x006C81,
"Azure Hint": 0xDDDCE1,
"Azure Lake": 0x7BBBC8,
"Azure Mist": 0xF0FFF1,
"Azure Radiance": 0x007F1F,
"Azure Sky": 0xB0E0F6,
"Azure Tide": 0x2B9890,
"Azurean": 0x59BAD9,
"Azureish White": 0xDBE9F4,
"Azuremyst Isle": 0xCC81F0,
"Azurite Water Green": 0x497F73,
"B'dazzled Blue": 0x2E5894,
"Baal Red Wash": 0x610023,
"Baba Ganoush": 0xEEBB88,
"Babbling Brook": 0xBECFCD,
"Babbling Creek": 0xA7BAD3,
"Babe": 0xDC7B7C,
"Babiana": 0x876FA3,
"Baby Aqua": 0xABCCC3,
"Baby Artichoke": 0xE9E3CE,
"Baby Barn Owl": 0xC3C3B8,
"Baby Bear": 0x6F5944,
"Baby Berries": 0x9C4A62,
"Baby Blossom": 0xFAEFE9,
"Baby Blue": 0xA2CFFE,
"Baby Blue Eyes": 0xA1CAF1,
"Baby Bok Choy": 0xBBB98A,
"Baby Breath": 0xF0D0B0,
"Baby Bunting": 0xABCAEA,
"Baby Burro": 0x8C665C,
"Baby Cake": 0x87BEA3,
"Baby Chick": 0xFFEDA2,
"Baby Fish Mouth": 0xF3ACB9,
"Baby Frog": 0xC8BA63,
"Baby Girl": 0xFFDFE8,
"Baby Grass": 0x8ABD7B,
"Baby Green": 0x8CFF9E,
"Baby Jane": 0xD0A7A8,
"Baby Melon": 0xFFA468,
"Baby Motive": 0x8FCBDC,
"Baby Pink": 0xFFB7CE,
"Baby Powder": 0xFEFEFA,
"Baby Purple": 0xCA9BF7,
"Baby Seal": 0xA1A5A8,
"Baby Shoes": 0x005784,
"Baby Spinach": 0x89A882,
"Baby Sprout": 0xA78B81,
"Baby Steps": 0xF5C9DA,
"Baby Tears": 0x66B9D6,
"Baby Tone": 0xDCC2CB,
"Baby Tooth": 0xEEFFDD,
"Baby Vegetable": 0x5D6942,
"Baby's Blanket": 0xFFAEC1,
"Baby's Booties": 0xE8C1C2,
"Baby's Breath": 0xD8E4E8,
"Babyccino": 0xEECCBB,
"Baca Berry": 0x945759,
"Bacchanalia Red": 0x8A3A3C,
"Bachelor Blue": 0x8FAACA,
"Bachelor Button": 0x4ABBD5,
"Bachimitsu Gold": 0xFDDEA5,
"Back In Black": 0x16141C,
"Back Stage": 0x6B625B,
"Back To Basics": 0x726747,
"Back to Nature": 0xBDB98F,
"Back to School": 0xC1853B,
"Backcountry": 0x7C725F,
"Backdrop": 0xA7A799,
"Backlight": 0xFCF0E5,
"Backwater": 0x687078,
"Backwoods": 0x4A6546,
"Backyard": 0x879877,
"Bacon Strips": 0xDF3F32,
"Bad Hair Day": 0xF1C983,
"Bad Moon Yellow": 0xF2E5B4,
"Badab Black Wash": 0x0A0908,
"Badlands Orange": 0xFF6316,
"Badlands Sunset": 0x936A5B,
"Badshahi Brown": 0xD3A194,
"Bag of Gold": 0xE1BD88,
"Bagel": 0xF6CD9B,
"Bagpiper": 0x1C5544,
"Baguette": 0xB5936A,
"Bahama Blue": 0x25597F,
"Bahaman Bliss": 0x3FA49B,
"Baharroth Blue": 0x58C1CD,
"Bahia": 0xA9C01C,
"Bahia Grass": 0xC4C5AD,
"Bái Sè White": 0xECEFEF,
"Baikō Brown": 0x887938,
"Bailey Bells": 0x8A8EC9,
"Bainganī": 0x8273FD,
"Baize": 0x4B5445,
"Baize Green": 0xC7CDA8,
"Baja": 0xD2C1A8,
"Baja Blue": 0x66A6D9,
"Baja White": 0xFFF8D1,
"Baked Apple": 0xB34646,
"Baked Bean": 0xB2754D,
"Baked Biscotti": 0xDAD3CC,
"Baked Bread": 0xDACBA9,
"Baked Brie": 0xEDE9D7,
"Baked Clay": 0x9C5642,
"Baked Cookie": 0x89674A,
"Baked Potato": 0xB69E87,
"Baked Salmon": 0xDF9876,
"Baked Scone": 0xE5D3BC,
"Baked Sienna": 0x9B775E,
"Bakelite": 0xE6D4A5,
"Bakelite Gold": 0xD7995D,
"Bakelite Yellow": 0xC6B788,
"Baker-Miller Pink": 0xFF92AE,
"Baker's Chocolate": 0x5C3317,
"Bakery Box": 0xF0F4F2,
"Bakery Brown": 0xAB9078,
"Baklava": 0xEFB435,
"Bakos Blue": 0x273F4B,
"Balance": 0xD1DBC2,
"Balance Green": 0xC3C5A7,
"Balanced": 0xD7D2D1,
"Balanced Beige": 0xC0B2A2,
"Balboa": 0xAFD3DA,
"Balcony Rose": 0xE2BCB8,
"Balcony Sunset": 0xD78E6B,
"Baleine Blue": 0x155187,
"Bali Batik": 0x6F5937,
"Bali Bliss": 0x5E9EA0,
"Bali Deep": 0x8A8E93,
"Bali Hai": 0x849CA9,
"Bali Sand": 0xF6E8D5,
"Balinese Sunset": 0xF1A177,
"Ball Blue": 0x21ABCD,
"Ball Gown": 0x525661,
"Ballad": 0xCAB6C6,
"Ballad Blue": 0xC0CEDA,
"Ballerina": 0xF2CFDC,
"Ballerina Beauty": 0xE8DED6,
"Ballerina Gown": 0xF9EAEA,
"Ballerina Pink": 0xF7B6BA,
"Ballerina Silk": 0xF0DEE0,
"Ballerina Tears": 0xF2BBB1,
"Ballerina Tutu": 0xC8647F,
"Ballet Blue": 0xAFC4D9,
"Ballet Cream": 0xFC8258,
"Ballet Rose": 0xD3ADB1,
"Ballet Shoes": 0xEDB9BD,
"Ballet Slipper": 0xEBCED5,
"Ballet White": 0xF2E7D8,
"Ballie Scott Sage": 0xB2B29C,
"Ballroom Blue": 0xA6B3C9,
"Ballyhoo": 0x58A83B,
"Balmy": 0xC5D8DE,
"Balmy Seas": 0xB4DCD3,
"Balor Brown": 0x9C6B08,
"Balsa Stone": 0xCBBB92,
"Balsam": 0xBEC4B7,
"Balsam Fir": 0x909E91,
"Balsam Green": 0x576664,
"Balsam Pear": 0xB19338,
"Balsamic Reduction": 0x434340,
"Balthasar Gold": 0xA47552,
"Baltic": 0x279D9F,
"Baltic Blue": 0x6C969A,
"Baltic Bream": 0x9FBBDA,
"Baltic Green": 0x3AA098,
"Baltic Prince": 0x135952,
"Baltic Sea": 0x3C3D3E,
"Baltic Trench": 0x125761,
"Baltic Turquoise": 0x00A49A,
"Bambino": 0x8EDACC,
"Bamboo": 0xE3DEC6,
"Bamboo Beige": 0xC1ABA0,
"Bamboo Brown": 0xC87F00,
"Bamboo Charcoal": 0x454A48,
"Bamboo Forest": 0xB1A979,
"Bamboo Grass Green": 0x82994C,
"Bamboo Leaf": 0x99B243,
"Bamboo Mat": 0xE5DA9F,
"Bamboo Screen": 0xBCAB8C,
"Bamboo Shoot": 0xA3B6A4,
"Bamboo White": 0xC6CFAD,
"Bamboo Yellow": 0xAE884B,
"Banafš Violet": 0x5A1991,
"Banafsaji Purple": 0xA50B5E,
"Banana": 0xFFFC79,
"Banana Bandanna": 0xF8F739,
"Banana Biscuit": 0xFFDE7B,
"Banana Blossom": 0x933E49,
"Banana Boat": 0xFDC838,
"Banana Bread": 0xFFCF73,
"Banana Brick": 0xE8D82C,
"Banana Brulee": 0xF7EAB9,
"Banana Chalk": 0xD6D963,
"Banana Clan": 0xEEDD00,
"Banana Cream": 0xFFF49C,
"Banana Crepe": 0xE7D3AD,
"Banana Custard": 0xFCF3C5,
"Banana Farm": 0xFFDF38,
"Banana Flash": 0xEEFF00,
"Banana Ice Cream": 0xF1D3B2,
"Banana Leaf": 0x9D8F3A,
"Banana Mania": 0xFBE7B2,
"Banana Mash": 0xFAFE4B,
"Banana Milkshake": 0xEDE6CB,
"Banana Palm": 0x95A263,
"Banana Peel": 0xFFE774,
"Banana Pepper": 0xFDD630,
"Banana Pie": 0xF7EFD7,
"Banana Powder": 0xD0C101,
"Banana Pudding": 0xF4EFC3,
"Banana Puree": 0xB29705,
"Banana Sparkes": 0xF6F5D7,
"Banana Split": 0xF7EEC8,
"Banana Yellow": 0xFFE135,
"Banana Yogurt": 0xFAE7B5,
"Bananarama": 0xE4D466,
"Bananas Foster": 0xDCBE97,
"Bancroft Village": 0x816E54,
"Banded Tulip": 0xE0D3BD,
"Bandicoot": 0x878466,
"Baneblade Brown": 0x937F6D,
"Bangalore": 0xBBAA88,
"Bangladesh Green": 0x006A4F,
"Banh Bot Loc Dumpling": 0xD2B762,
"Banished Brown": 0x745E6F,
"Bank Blue": 0x3E4652,
"Bank Vault": 0x757374,
"Banksia": 0xA6B29A,
"Banksia Leaf": 0x4B5539,
"Banner Gold": 0xA28557,
"Bannister Brown": 0x806B5D,
"Bannister White": 0xE1E0D6,
"Banshee": 0xDAF0E6,
"Banyan Serenity": 0x98AB8C,
"Bara Red": 0xE9546B,
"Baragon Brown": 0x551100,
"Barbados": 0x3E6676,
"Barbados Bay": 0x006665,
"Barbados Beige": 0xB8A983,
"Barbados Blue": 0x2766AC,
"Barbados Cherry": 0xAA0A27,
"Barbarian Flesh": 0xF78C5A,
"Barbarian Leather": 0xA17308,
"Barbarossa": 0xA84734,
"Barbecue": 0xC26157,
"Barberry": 0xEE1133,
"Barberry Bush": 0xD2C61F,
"Barberry Sand": 0xE1D4BC,
"Barberry Yellow": 0xF3BD32,
"Barbie Pink": 0xFE46A5,
"Barcelona Beige": 0xC4B39C,
"Barcelona Brown": 0x926A46,
"Bare": 0x817E6D,
"Bare Beige": 0xE8D3C9,
"Bare Bone": 0xEEDDCC,
"Bare Pink": 0xF2E1DD,
"Barely Aqua": 0xBAE9E0,
"Barely Bloomed": 0xDDAADD,
"Barely Blue": 0xDDE0DF,
"Barely Brown": 0xDD6655,
"Barely Butter": 0xF8E9C2,
"Barely Mauve": 0xCCBDB9,
"Barely Peach": 0xFFE9C7,
"Barely | |
import os
import time
import numpy as np
import scipy as sci
import scipy.misc
import scipy.integrate
import pykep as pk
from hodographicShaping_SI import hodographicShaping
from integration import integrate
from shapingFunctions import shapeFunctions
from shapingFunctions import shapeFunctionsFree
from patchedTrajectoryUtils import *
from conversions import *
def fullTest1(rtol = 1e-3, atol = 1e-2):
depMjd = 8002
tof = 1500
N = 3
arrMjd = depMjd + tof
depBody = 'earth'
arrBody = 'mars'
scStateDep, __, __ = ephemeris(depBody, depMjd)
scStateArr, __, __ = ephemeris(arrBody, arrMjd)
transfer = hodographicShaping(scStateDep, scStateArr,
departureBody='earth', arrivalBody='mars',
departureDate=8002, tof=1500, N=3,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CosR5P3CosR5P3SinR5_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [137, 178],
thetaFreeC = [100, 1364],
zFreeC = [32, 283],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
transfer.assembleThrust()
transfer.checkBoundaryConditions()
transfer.evaluate(evalThrust='Grid', printTime=False, nEvalPoints = 1000)
testPoints = [transfer.psiTransfer, transfer.deltaV, transfer.maxThrust]
# print(testPoints)
knownPoints = [3.858953628300317, 273559.833321966, 0.003146874363948356]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tFull test 1 (earth-mars, departureDate=8002, tof=1500, N=3)')
else:
print('ERROR\tFull test 1 (earth-mars, departureDate=8002,',
'tof=1500, N=3)')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def fullTest2(rtol = 1e-3, atol = 1e-2):
# trajectory settings
depMjd = 10025
tof = 1050
N = 2
arrMjd = depMjd + tof
depBody = 'earth'
arrBody = 'mars'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd)
scStateArr, __, __ = ephemeris(arrBody, arrMjd)
transfer = hodographicShaping(scStateDep, scStateArr,
departureDate=depMjd, tof=tof, N=N,
departureBody = depBody,
arrivalBody = arrBody,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CosR5P3CosR5P3SinR5_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [0, 0],
thetaFreeC = [0, 0],
zFreeC = [0, 0],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
transfer.assembleThrust()
transfer.checkBoundaryConditions()
transfer.evaluate(evalThrust='Grid', printTime=False, nEvalPoints = 1000)
testPoints = [transfer.psiTransfer, transfer.deltaV, transfer.maxThrust]
# print(testPoints)
knownPoints = [2.573118029750549, 6338.852298544599, 0.00015143346642256032]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tFull test 2 (earth-mars, departureDate=10025,',
'tof=1050, N=2)')
else:
print('ERROR\tFull test 2 (earth-mars, departureDate=10025,',
'tof=1050, N=2)')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def fullTest3(rtol = 1e-3, atol = 1e-2):
# trajectory settings
depMjd = 12000
tof = 300
N = 1
arrMjd = depMjd + tof
depBody = 'earth'
arrBody = 'venus'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd)
scStateArr, __, __ = ephemeris(arrBody, arrMjd)
transfer = hodographicShaping(scStateDep, scStateArr,
departureDate=depMjd, tof=tof, N=N,
departureBody = depBody,
arrivalBody = arrBody,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CPowPow2_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [2, 20000],
thetaFreeC = [173, 3460],
zFreeC = [1990, 3333],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
transfer.assembleThrust()
transfer.checkBoundaryConditions()
transfer.evaluate(evalThrust='Grid', printTime=False, nEvalPoints = 1000)
testPoints = [transfer.psiTransfer, transfer.deltaV, transfer.maxThrust]
# print(testPoints)
knownPoints = [0.71804214972971, 22535.748458173402, 0.0012091070214979359]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tFull test 3 (earth-venus, departureDate=12000,',
'tof=300, N=1)')
else:
print('ERROR\tFull test 3 (earth-venus, departureDate=12000,',
'tof=300, N=1)')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def fullTest4(rtol = 1e-3, atol = 1e-2):
# trajectory settings
depMjd = 9453
tof = 844
N = 1
arrMjd = depMjd + tof
depBody = '3'
arrBody = '4'
ephems = 'spice'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd, mode=ephems)
scStateArr, __, __ = ephemeris(arrBody, arrMjd, mode=ephems)
transfer = hodographicShaping(scStateDep, scStateArr,
departureDate=depMjd, tof=tof, N=N,
departureBody = depBody,
arrivalBody = arrBody,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CosR5P3CosR5P3SinR5_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [137, 178],
thetaFreeC = [100, 1364],
zFreeC = [32, 283],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
transfer.assembleThrust()
transfer.checkBoundaryConditions()
transfer.evaluate(evalThrust='Grid', printTime=False, nEvalPoints = 1000)
testPoints = [transfer.psiTransfer, transfer.deltaV, transfer.maxThrust]
knownPoints = [5.213985160376169, 45508.713465061395, 0.0010283457257095393]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tFull test 4 (3-4, departureDate=9453, tof=844, N=1)')
else:
print('ERROR\tFull test 4 (3-4, departureDate=9453, tof=844, N=1)')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def boundaryConditionTestVel(rtol = 1e-3, atol = 1e-2):
# trajectory settings
depMjd = 10025
tof = 1050
N = 2
arrMjd = depMjd + tof
depBody = 'earth'
arrBody = 'mars'
ephems = 'jpl'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd, mode=ephems)
scStateArr, __, __ = ephemeris(arrBody, arrMjd, mode=ephems)
transfer = hodographicShaping(scStateDep, scStateArr,
departureDate=depMjd, tof=tof, N=N,
departureBody = depBody,
arrivalBody = arrBody,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CosR5P3CosR5P3SinR5_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [0, 0],
thetaFreeC = [0, 0],
zFreeC = [0, 0],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
testPoints = [transfer.rDot(transfer.tofSec),
transfer.tDot(transfer.tofSec),
transfer.zDot(transfer.tofSec)]
# print(testPoints)
knownPoints = [2161.3061456572896, 24897.147647368587, 802.1571912246434]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tBoundary conditions velocity test')
else:
print('ERROR\tBoundary conditions velocity test')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def boundaryConditionTestPos(rtol = 1e-3, atol = 1e-2):
# trajectory settings
depMjd = 10025
tof = 1050
N = 2
arrMjd = depMjd + tof
depBody = 'earth'
arrBody = 'mars'
ephems = 'jpl'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd, mode=ephems)
scStateArr, __, __ = ephemeris(arrBody, arrMjd, mode=ephems)
transfer = hodographicShaping(scStateDep, scStateArr,
departureDate=depMjd, tof=tof, N=N,
departureBody = depBody,
arrivalBody = arrBody,
rShape = 'CPowPow2_scaled',
thetaShape = 'CPowPow2_scaled',
zShape = 'CosR5P3CosR5P3SinR5_scaled',
rShapeFree = 'PSin05PCos05_scaled',
thetaShapeFree = 'PSin05PCos05_scaled',
zShapeFree = 'P4CosR5P4SinR5_scaled',
rFreeC = [0, 0],
thetaFreeC = [0, 0],
zFreeC = [0, 0],
)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
testPoints = [transfer.r(transfer.tofSec),
transfer.t(transfer.tofSec),
# 0,
transfer.z(transfer.tofSec)]
# print(testPoints)
knownPoints = [219832470724.13513, 13.418874318624699, -77456667.88140798]
test = np.allclose(testPoints, knownPoints, rtol, atol)
if test == True:
print('OK\tBoundary conditions position test')
else:
print('ERROR\tBoundary conditions position test')
print('\tComputed: ', testPoints)
print('\tExpected: ', knownPoints)
def boundaryConditionComparison():
# trajectory settings
depMjd = 3421
tof = 349
N = 1
arrMjd = depMjd + tof
depBody = '3'
arrBody = '4'
ephems = 'spice'
# departure and arrival states: rendezvous
scStateDep, __, __ = ephemeris(depBody, depMjd, mode=ephems)
scStateArr, __, __ = ephemeris(arrBody, arrMjd, mode=ephems)
transfer = hodographicShaping(scStateDep, scStateArr)
transfer.shapingRadial()
transfer.shapingVertical()
transfer.shapingTransverse()
transfer.assembleThrust()
transfer.checkBoundaryConditions()
if transfer.velCompare == True and transfer.posCompare == True:
print('OK\tBoundary conditions comparison (built-in)')
else:
print('ERROR\tBoundary conditions comparison (built-in)')
def integrationTest1(rtol = 1e-3, atol = 1e-2):
func1 = lambda x: x**2 * np.cos(x)
x0 = 0
x1 = 100
nSteps = int(1e5)
# quad is reference solution
intResult1 = integrate(func1, x0, x1, method='quad')
# compute the same result using the trapezoidal rule
intResult2 = integrate(func1, x0, x1, method='trapz', nSteps=nSteps)
test = np.allclose(intResult1, intResult2, rtol, atol)
if test == True:
print('OK\tIntegration test')
else:
print('ERROR\tIntegration test')
print('\tComputed quad:\t', intResult1)
print('\tComputed others:', intResult2)
def checkShapes(rtol = 1e-3, atol = 1e-2):
def aNum(function, t):
'''
Numerical derivative
'''
a = sci.misc.derivative(function, t, 1e-5)
return a
def sNum(function, t):
'''
Numerical integral
'''
s, err = sci.integrate.quad(function, 0, t)
return s
def sampleShapes(shape, tEval, nShapes=3):
'''
Samples a set of shape functions (integral and derivative)
at the points in tEval
Also computes numerical approximations of integrals and derivatives
'''
# sample position
Iv1 = np.zeros(np.shape(tEval))
Iv2 = np.zeros(np.shape(tEval))
Iv1num = np.zeros(np.shape(tEval))
Iv2num = np.zeros(np.shape(tEval))
Dv1 = np.zeros(np.shape(tEval))
Dv2 = np.zeros(np.shape(tEval))
Dv1num = np.zeros(np.shape(tEval))
Dv2num = np.zeros(np.shape(tEval))
if nShapes == 3:
Iv3 = np.zeros(np.shape(tEval))
Iv3num = np.zeros(np.shape(tEval))
Dv3 = np.zeros(np.shape(tEval))
Dv3num = np.zeros(np.shape(tEval))
for i in np.arange(0, len(tEval)):
Iv1[i] = shape.Iv1(tEval[i])
Iv2[i] = shape.Iv2(tEval[i])
Iv1num[i] = sNum(shape.v1, tEval[i])
Iv2num[i] = sNum(shape.v2, tEval[i])
Dv1[i] = shape.Dv1(tEval[i])
Dv2[i] = shape.Dv2(tEval[i])
Dv1num[i] = aNum(shape.v1, tEval[i])
Dv2num[i] = aNum(shape.v2, tEval[i])
if nShapes == 3:
Iv3[i] = shape.Iv3(tEval[i])
Iv3num[i] = sNum(shape.v3, tEval[i])
Dv3[i] = shape.Dv3(tEval[i])
Dv3num[i] = aNum(shape.v3, tEval[i])
if nShapes == 3:
samplesAnalytical = np.vstack([Dv1, Dv2, Dv3, Iv1, Iv2, Iv3])
samplesNumerical = np.vstack([Dv1num, Dv2num, Dv3num,
Iv1num, Iv2num, Iv3num])
elif nShapes == 2:
samplesAnalytical = np.vstack([Dv1, Dv2, Iv1, Iv2])
samplesNumerical = np.vstack([Dv1num, Dv2num, Iv1num, Iv2num])
return samplesAnalytical, samplesNumerical
# number of revolutions
N = 2
# time interval and sampling steps
tMax = 500*24*60*60
tMax = 11  # note: overrides the value above; the short interval is what is tested
nSamples = 1001
tEval = np.linspace(0, tMax, nSamples)
# test shaping functions
shorthands = [ 'CPowPow2',
'CPowPow2_scaled',
'CPow2CosR5',
'CosR5P3CosR5P3SinR5',
'CosR5P3CosR5P3SinR5_scaled',
]
# return error if one of the specified functions is not close to the
# numerical computation
test = True
errShapes = ''
for shorthand in shorthands:
shape = shapeFunctions(N=N, shorthand=shorthand, tMax=tMax)
        # continuation reconstructed to match the file's OK/ERROR convention
        # (the original text is truncated here): compare the analytical
        # derivatives/integrals against their numerical approximations
        samplesAnalytical, samplesNumerical = sampleShapes(shape, tEval)
        if not np.allclose(samplesAnalytical, samplesNumerical, rtol, atol):
            test = False
            errShapes += shorthand + ' '
    if test == True:
        print('OK\tShape function test')
    else:
        print('ERROR\tShape function test, mismatch for: ' + errShapes)
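# Minimal, assumed test driver: the original file's entry point (if any) is
# truncated away, so this simply runs each self-checking test defined above.
if __name__ == '__main__':
    fullTest1()
    fullTest2()
    fullTest3()
    fullTest4()
    boundaryConditionTestVel()
    boundaryConditionTestPos()
    boundaryConditionComparison()
    integrationTest1()
    checkShapes()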
<filename>04_ML_approach_part2.py
###############################################################################
# #
# machine learning approach part 2 #
# neural networks #
# June 23 2020 #
###############################################################################
### Loading libraries #########################################################
import time
import numpy as np
seed = 1
np.random.seed(seed)
import pandas as pd
pd.options.mode.chained_assignment = None
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning)
from keras.models import Sequential
from keras.layers import Dense
from keras.callbacks import EarlyStopping
from keras import backend as K
from keras.backend import clear_session
from sklearn.metrics import recall_score, confusion_matrix, roc_auc_score
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import StratifiedKFold
from scipy import stats
import math
import pickle
######################################################## Loading libraries ####
### Declaring I/O variables ###################################################
input_file = 'pre-processed_data.pickle'
output_file = 'ML_summary_part2.pickle'
################################################## Declaring I/O variables ####
### Declaring Functions #######################################################
def specificity(y, y_pred):
    # true-negative rate: TN / (TN + FP)
    tn, fp, fn, tp = confusion_matrix(y, y_pred).ravel()
    return (tn / (tn + fp))
###################################################### Declaring Functions ####
### Main routine ##############################################################
# Registering initial time
a = time.time()
print("--start--")
# Open input file
datasets = pd.read_pickle(input_file)
k = 10
columns = ['n', 'DB', 'Level', 'Column',
'n_0', 'n_1',
'Sensitivity Train (95% CI)', 'Specificity Train (95% CI)', 'AUC Train (95% CI)',
'Sensitivity Validation (95% CI)', 'Specificity Validation (95% CI)', 'AUC Validation (95% CI)',
'Sensitivity Test', 'Specificity Test', 'AUC Test',
'Best_Classifier', 'Best_Parameters'
]
output_summary = pd.DataFrame(columns = columns)
n_datasets = len(datasets['info'])
ngram_ranges = [(1,1), (1,2), (1,3)]
max_dfs = [0.7, 0.8, 0.9, 0.95, 1.0]
min_dfs = [2, 10, 50]
binarys = [False, True]
use_idfs = [False, True]
norms = ['l1', 'l2', None]
optimizers = ['adam']
n_combinations = len(ngram_ranges) * len(max_dfs) * len(min_dfs) * \
len(binarys) * len(use_idfs) * len(norms) * \
len(optimizers)
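# With the grids above, 3 * 5 * 3 * 2 * 2 * 3 * 1 = 540 hyperparameter
# combinations are evaluated per dataset.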
for n in range(1, n_datasets + 1):
print()
print('Processing dataset number: ',n)
validation_scores = pd.DataFrame(columns = ['n',
'ngram_range',
'max_df',
'min_df',
'binary',
'use_idf',
'norm',
'optimizer'])
# Loading dataset info
dataset_info = datasets['info'].loc[n,:]
n_0 = dataset_info['n_0']
n_1 = dataset_info['n_1']
db_info = dataset_info['data_option']
level_info = dataset_info['level']
column_info = dataset_info['column']
go_on = dataset_info['go_on']
if go_on == True:
combination_summary = pd.DataFrame()
dataset = datasets[n]
X_train_validation = dataset['X_train_validation']
y_train_validation = dataset['y_train_validation']
X_test = dataset['X_test']
y_test = dataset['y_test']
vectorizer_dict = {}
combination = 0
AUC_mean_validation = '0.000'
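# The repeated checks below break out of every loop level as soon as a
# combination reaches a perfect mean validation AUC of 1.000.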
for ngram_range in ngram_ranges:
if AUC_mean_validation == '1.000':
break
for max_df in max_dfs:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
for min_df in min_dfs:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
for binary in binarys:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
for use_idf in use_idfs:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
for norm in norms:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
for optimizer in optimizers:
if AUC_mean_validation == '1.000':
print('AUC_mean_validation == 1.000')
break
a11 = time.time()
kfold = StratifiedKFold(n_splits = k, shuffle = True, random_state = seed)
sensitivity_train_list = []
specificity_train_list = []
auc_train_list = []
sensitivity_validation_list = []
specificity_validation_list = []
auc_validation_list = []
fold = 0
for train_index, validation_index in kfold.split(X_train_validation, y_train_validation):
a1 = time.time()
X_train, y_train, X_validation, y_validation = X_train_validation.iloc[train_index], y_train_validation.iloc[train_index], X_train_validation.iloc[validation_index], y_train_validation.iloc[validation_index]
print()
print('Processing dataset number: ',n)
print('combination: ', combination, 'out of: ', n_combinations)
print('ngram_range: ',ngram_range)
print('max_df: ',max_df)
print('min_df: ',min_df)
print('binary: ',binary)
print('use_idf: ',use_idf)
print('norm: ',norm)
print('optimizer: ',optimizer)
print('Fold: ',fold)
print()
vectorizer = TfidfVectorizer(
ngram_range = ngram_range,
max_df = max_df,
min_df = min_df,
binary = binary,
use_idf = use_idf,
norm = norm,
)
X_train = vectorizer.fit_transform(X_train)
X_validation = vectorizer.transform(X_validation)
X_train = X_train.todense()
X_validation = X_validation.todense()
y_train = y_train.to_numpy()
y_validation = y_validation.to_numpy()
n_feat = X_train.shape[1]
if n_feat > 2048:
n_feat = 2048
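# Single-hidden-layer MLP: the hidden width mirrors the TF-IDF vocabulary
# size (capped at 2048 units above), and a single sigmoid unit outputs the
# probability of the positive class.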
model = Sequential()
model.add(Dense(n_feat,activation='relu'))
model.add(Dense(1,activation='sigmoid',))
model.compile(optimizer = optimizer,
loss = 'binary_crossentropy',
metrics = ['binary_accuracy'])
model.fit(X_train,
y_train,
epochs = 1000,
validation_data = (X_validation, y_validation),
verbose = 0,
shuffle = False,
initial_epoch = 0,
callbacks=[EarlyStopping(monitor='val_loss', min_delta = 0.01)]
)
y_pred_train = model.predict(X_train)
y_pred_validation = model.predict(X_validation)
clear_session()
# Calculating perfomance metrics
sensitivity_train_fold_list = []
specificity_train_fold_list = []
auc_train_fold_list = []
sensitivity_validation_fold_list = []
specificity_validation_fold_list = []
auc_validation_fold_list = []
threshold_index = []
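# Scan decision thresholds 0.01..0.99; metrics are recorded at every
# threshold, and the one maximizing mean validation AUC is chosen below.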
for threshold in np.arange(0.01,1,0.01):
threshold_index.append(threshold)
y_pred_train_temp = [1 if prediction >= threshold else 0 for prediction in y_pred_train]
y_pred_validation_temp = [1 if prediction >= threshold else 0 for prediction in y_pred_validation]
sensitivity_train = recall_score(y_train, y_pred_train_temp)
specificity_train = specificity(y_train, y_pred_train_temp)
auc_train = roc_auc_score(y_train, y_pred_train_temp)
sensitivity_validation = recall_score(y_validation, y_pred_validation_temp)
specificity_validation = specificity(y_validation, y_pred_validation_temp)
auc_validation = roc_auc_score(y_validation, y_pred_validation_temp)
sensitivity_train_fold_list.append(sensitivity_train)
specificity_train_fold_list.append(specificity_train)
auc_train_fold_list.append(auc_train)
sensitivity_validation_fold_list.append(sensitivity_validation)
specificity_validation_fold_list.append(specificity_validation)
auc_validation_fold_list.append(auc_validation)
sensitivity_train_list.append(sensitivity_train_fold_list)
specificity_train_list.append(specificity_train_fold_list)
auc_train_list.append(auc_train_fold_list)
sensitivity_validation_list.append(sensitivity_validation_fold_list)
specificity_validation_list.append(specificity_validation_fold_list)
auc_validation_list.append(auc_validation_fold_list)
if fold == 0:
vectorizer_dict[combination] = {fold : vectorizer}
else:
vectorizer_dict[combination].update({fold : vectorizer})
fold += 1
b1 = time.time()
print('Fold processing time: %0.2f minutes' %((b1-a1)/60))
print()
auc_threshold = []
auc_threshold_max_fold = []
for threshold in range(0,99):
auc_temp = []
for f in range(0,fold):
auc_temp.append(auc_validation_list[f][threshold])
# Identify the fold that had the best AUC for each threshold
auc_threshold_max_fold.append(auc_temp.index(max(auc_temp)))
auc_threshold.append(np.mean(auc_temp))
best_threshold_n = auc_threshold.index(max(auc_threshold))
best_threshold = threshold_index[best_threshold_n]
reference_fold = auc_threshold_max_fold[best_threshold_n]
best_threshold = np.round(best_threshold,3)
sensitivity_train = []
specificity_train = []
AUC_train = []
sensitivity_validation = []
specificity_validation = []
AUC_validation = []
for f in range(0,fold):
sensitivity_train.append(sensitivity_train_list[f][best_threshold_n])
specificity_train.append(specificity_train_list[f][best_threshold_n])
AUC_train.append(auc_train_list[f][best_threshold_n])
sensitivity_validation.append(sensitivity_validation_list[f][best_threshold_n])
specificity_validation.append(specificity_validation_list[f][best_threshold_n])
AUC_validation.append(auc_validation_list[f][best_threshold_n])
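# Each metric below is reported as "mean (LB-UB)", a 95% confidence interval
# over the k folds: mean +/- t(0.975, k-1) * std / sqrt(k), clipped to [0, 1].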
# sensitivity train
sensitivity_mean_train = '{:1.3f}'.format(round(np.mean(sensitivity_train), 3))
sensitivity_LB_train = np.mean(sensitivity_train) - stats.t.ppf(1-0.025, k - 1)*np.std(sensitivity_train)/math.sqrt(k)
if sensitivity_LB_train < 0:
sensitivity_LB_train = 0
sensitivity_UB_train = np.mean(sensitivity_train) + stats.t.ppf(1-0.025, k - 1)*np.std(sensitivity_train)/math.sqrt(k)
if sensitivity_UB_train > 1:
sensitivity_UB_train = 1
sensitivity_LB_train = '{:1.3f}'.format(sensitivity_LB_train,3)
sensitivity_UB_train = '{:1.3f}'.format(sensitivity_UB_train,3)
# sensitivity validation
sensitivity_mean_validation = '{:1.3f}'.format(round(np.mean(sensitivity_validation), 3))
sensitivity_LB_validation = np.mean(sensitivity_validation) - stats.t.ppf(1-0.025, k - 1)*np.std(sensitivity_validation)/math.sqrt(k)
if sensitivity_LB_validation < 0:
sensitivity_LB_validation = 0
sensitivity_UB_validation = np.mean(sensitivity_validation) + stats.t.ppf(1-0.025, k - 1)*np.std(sensitivity_validation)/math.sqrt(k)
if sensitivity_UB_validation > 1:
sensitivity_UB_validation = 1
sensitivity_LB_validation = '{:1.3f}'.format(sensitivity_LB_validation,3)
sensitivity_UB_validation = '{:1.3f}'.format(sensitivity_UB_validation,3)
# Specificity train
specificity_mean_train = '{:1.3f}'.format(round(np.mean(specificity_train), 3))
specificity_LB_train = np.mean(specificity_train) - stats.t.ppf(1-0.025, k - 1)*np.std(specificity_train)/math.sqrt(k)
if specificity_LB_train < 0:
specificity_LB_train = 0
specificity_UB_train = np.mean(specificity_train) + stats.t.ppf(1-0.025, k - 1)*np.std(specificity_train)/math.sqrt(k)
if specificity_UB_train > 1:
specificity_UB_train = 1
specificity_LB_train = '{:1.3f}'.format(specificity_LB_train,3)
specificity_UB_train = '{:1.3f}'.format(specificity_UB_train,3)
# Specificity validation
specificity_mean_validation = '{:1.3f}'.format(round(np.mean(specificity_validation), 3))
specificity_LB_validation = np.mean(specificity_validation) - stats.t.ppf(1-0.025, k - 1)*np.std(specificity_validation)/math.sqrt(k)
if specificity_LB_validation < 0:
specificity_LB_validation = 0
specificity_UB_validation = np.mean(specificity_validation) + stats.t.ppf(1-0.025, k - 1)*np.std(specificity_validation)/math.sqrt(k)
if specificity_UB_validation > 1:
specificity_UB_validation = 1
specificity_LB_validation = '{:1.3f}'.format(specificity_LB_validation,3)
specificity_UB_validation = '{:1.3f}'.format(specificity_UB_validation,3)
# AUC train
AUC_mean_train = '{:1.3f}'.format(round(np.mean(AUC_train), 3))
AUC_LB_train = np.mean(AUC_train) - stats.t.ppf(1-0.025, k - 1)*np.std(AUC_train)/math.sqrt(k)
if AUC_LB_train < 0:
AUC_LB_train = 0
AUC_UB_train = np.mean(AUC_train) + stats.t.ppf(1-0.025, k - 1)*np.std(AUC_train)/math.sqrt(k)
if AUC_UB_train > 1:
AUC_UB_train = 1
AUC_LB_train = '{:1.3f}'.format(AUC_LB_train,3)
AUC_UB_train = '{:1.3f}'.format(AUC_UB_train,3)
# AUC validation
AUC_mean_validation = '{:1.3f}'.format(round(np.mean(AUC_validation), 3))
AUC_LB_validation = np.mean(AUC_validation) - stats.t.ppf(1-0.025, k - 1)*np.std(AUC_validation)/math.sqrt(k)
if AUC_LB_validation < 0:
AUC_LB_validation = 0
AUC_UB_validation = np.mean(AUC_validation) + stats.t.ppf(1-0.025, k - 1)*np.std(AUC_validation)/math.sqrt(k)
if AUC_UB_validation > 1:
AUC_UB_validation = 1
AUC_LB_validation = '{:1.3f}'.format(AUC_LB_validation,3)
AUC_UB_validation = '{:1.3f}'.format(AUC_UB_validation,3)
# formating metrics for output
sensitivity_train = sensitivity_mean_train+' ('+sensitivity_LB_train+'-'+sensitivity_UB_train+')'
specificity_train = specificity_mean_train+' ('+specificity_LB_train+'-'+specificity_UB_train+')'
AUC_train = AUC_mean_train+' ('+AUC_LB_train+'-'+AUC_UB_train+')'
sensitivity_validation = sensitivity_mean_validation+' ('+sensitivity_LB_validation+'-'+sensitivity_UB_validation+')'
specificity_validation = specificity_mean_validation+' ('+specificity_LB_validation+'-'+specificity_UB_validation+')'
AUC_validation = AUC_mean_validation+' ('+AUC_LB_validation+'-'+AUC_UB_validation+')'
parameters = ', '.join(['ngram_range: '+str(ngram_range)] +
['max_df: '+str(max_df)] +
['min_df: '+str(min_df)] +
['binary: '+str(binary)] +
['use_idf: '+str(use_idf)] +
['norm: '+str(norm)] +
['optimizer: '+str(optimizer)])
# saving info of this round
combination_summary.loc[combination,'combination'] = combination
combination_summary.loc[combination,'ngram_range'] = str(ngram_range)
combination_summary.loc[combination,'max_df'] = str(max_df)
combination_summary.loc[combination,'min_df'] = str(min_df)
combination_summary.loc[combination,'binary'] = str(binary)
combination_summary.loc[combination,'use_idf'] = str(use_idf)
combination_summary.loc[combination,'norm'] = str(norm)
combination_summary.loc[combination,'optimizer'] = str(optimizer)
combination_summary.loc[combination,'Threshold'] = best_threshold
combination_summary.loc[combination,'reference_fold'] = reference_fold
combination_summary.loc[combination,'Sensitivity Train (95% CI)'] = sensitivity_train
combination_summary.loc[combination,'Specificity Train (95% CI)'] = specificity_train
combination_summary.loc[combination,'AUC Train (95% CI)'] = AUC_train
combination_summary.loc[combination,'Sensitivity Validation (95% CI)'] = sensitivity_validation
combination_summary.loc[combination,'Specificity Validation (95% CI)'] = specificity_validation
combination_summary.loc[combination,'AUC Validation (95% CI)'] = AUC_validation
combination += 1
b11 = time.time()
print('AUC Validation (95% CI): ', AUC_validation)
print('Combination processing time: %0.2f minutes' %((b11-a11)/60))
print()
combination_summary = combination_summary.sort_values(by = 'AUC Validation (95% CI)', ascending = False).reset_index(drop = True)
best_combination = combination_summary.loc[0, 'combination']
best_ngram_range = combination_summary.loc[0, 'ngram_range']
best_max_df = combination_summary.loc[0, 'max_df']
best_min_df = combination_summary.loc[0, 'min_df']
best_binary = combination_summary.loc[0, 'binary']
best_use_idf = combination_summary.loc[0, 'use_idf']
best_norm = combination_summary.loc[0, 'norm']
best_optimizer = combination_summary.loc[0, 'optimizer']
best_threshold = combination_summary.loc[0, 'Threshold']
best_reference_fold = combination_summary.loc[0, 'reference_fold']
best_sensitivity_train = combination_summary.loc[0,'Sensitivity Train (95% CI)']
best_specificity_train = combination_summary.loc[0,'Specificity Train (95% | |
<reponame>minakhoshbazm/CodART<gh_stars>0
# Generated from Java9.g4 by ANTLR 4.8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2x")
buf.write("\u04a4\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.")
buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64")
buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:")
buf.write("\4;\t;\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\t")
buf.write("C\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\t")
buf.write("L\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\t")
buf.write("U\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4")
buf.write("^\t^\4_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4")
buf.write("g\tg\4h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4")
buf.write("p\tp\4q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4")
buf.write("y\ty\4z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080")
buf.write("\t\u0080\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083")
buf.write("\4\u0084\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087")
buf.write("\t\u0087\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a")
buf.write("\4\u008b\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e")
buf.write("\t\u008e\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091")
buf.write("\4\u0092\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095")
buf.write("\t\u0095\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098")
buf.write("\4\u0099\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c")
buf.write("\t\u009c\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f")
buf.write("\4\u00a0\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3")
buf.write("\t\u00a3\4\u00a4\t\u00a4\4\u00a5\t\u00a5\3\2\3\2\3\2\3")
buf.write("\2\3\2\3\3\3\3\3\3\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3\4")
buf.write("\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3")
buf.write("\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\t")
buf.write("\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\n\3")
buf.write("\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13\3\13")
buf.write("\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3")
buf.write("\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17")
buf.write("\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21")
buf.write("\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\23")
buf.write("\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\24\3\25")
buf.write("\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26")
buf.write("\3\26\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27")
buf.write("\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32")
buf.write("\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\34\3\34")
buf.write("\3\34\3\34\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35")
buf.write("\3\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\37\3\37")
buf.write("\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3!\3!\3!\3\"\3\"\3\"")
buf.write("\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3#\3$\3$\3$\3$")
buf.write("\3$\3$\3$\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3&\3&\3&\3")
buf.write("&\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(")
buf.write("\3(\3)\3)\3)\3)\3)\3)\3)\3*\3*\3*\3*\3+\3+\3+\3+\3+\3")
buf.write("+\3+\3+\3,\3,\3,\3,\3,\3,\3,\3,\3-\3-\3-\3-\3-\3-\3-\3")
buf.write("-\3-\3-\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3/\3/\3/\3/\3\60")
buf.write("\3\60\3\60\3\60\3\60\3\60\3\61\3\61\3\61\3\61\3\61\3\61")
buf.write("\3\61\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\62\3\63")
buf.write("\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64")
buf.write("\3\64\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65\3\65")
buf.write("\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67")
buf.write("\3\67\3\67\3\67\38\38\38\38\38\38\38\39\39\39\39\39\3")
buf.write("9\39\39\39\39\3:\3:\3:\3:\3;\3;\3;\3;\3;\3<\3<\3<\3<\3")
buf.write("<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3=\3>\3>\3?\3?\3?\3?\5?\u02e9")
buf.write("\n?\3@\3@\5@\u02ed\n@\3A\3A\5A\u02f1\nA\3B\3B\5B\u02f5")
buf.write("\nB\3C\3C\5C\u02f9\nC\3D\3D\3E\3E\3E\5E\u0300\nE\3E\3")
buf.write("E\3E\5E\u0305\nE\5E\u0307\nE\3F\3F\5F\u030b\nF\3F\5F\u030e")
buf.write("\nF\3G\3G\5G\u0312\nG\3H\3H\3I\6I\u0317\nI\rI\16I\u0318")
buf.write("\3J\3J\5J\u031d\nJ\3K\6K\u0320\nK\rK\16K\u0321\3L\3L\3")
buf.write("L\3L\3M\3M\5M\u032a\nM\3M\5M\u032d\nM\3N\3N\3O\6O\u0332")
buf.write("\nO\rO\16O\u0333\3P\3P\5P\u0338\nP\3Q\3Q\5Q\u033c\nQ\3")
buf.write("Q\3Q\3R\3R\5R\u0342\nR\3R\5R\u0345\nR\3S\3S\3T\6T\u034a")
buf.write("\nT\rT\16T\u034b\3U\3U\5U\u0350\nU\3V\3V\3V\3V\3W\3W\5")
buf.write("W\u0358\nW\3W\5W\u035b\nW\3X\3X\3Y\6Y\u0360\nY\rY\16Y")
buf.write("\u0361\3Z\3Z\5Z\u0366\nZ\3[\3[\5[\u036a\n[\3\\\3\\\3\\")
buf.write("\5\\\u036f\n\\\3\\\5\\\u0372\n\\\3\\\5\\\u0375\n\\\3\\")
buf.write("\3\\\3\\\5\\\u037a\n\\\3\\\5\\\u037d\n\\\3\\\3\\\3\\\5")
buf.write("\\\u0382\n\\\3\\\3\\\3\\\5\\\u0387\n\\\3]\3]\3]\3^\3^")
buf.write("\3_\5_\u038f\n_\3_\3_\3`\3`\3a\3a\3b\3b\3b\5b\u039a\n")
buf.write("b\3c\3c\5c\u039e\nc\3c\3c\3c\5c\u03a3\nc\3c\3c\5c\u03a7")
buf.write("\nc\3d\3d\3d\3e\3e\3f\3f\3f\3f\3f\3f\3f\3f\3f\5f\u03b7")
buf.write("\nf\3g\3g\3g\3g\3g\3g\3g\3g\5g\u03c1\ng\3h\3h\3i\3i\5")
buf.write("i\u03c7\ni\3i\3i\3j\6j\u03cc\nj\rj\16j\u03cd\3k\3k\5k")
buf.write("\u03d2\nk\3l\3l\3l\3l\5l\u03d8\nl\3m\3m\3m\3m\3m\3m\3")
buf.write("m\3m\3m\3m\3m\5m\u03e5\nm\3n\3n\3o\3o\6o\u03eb\no\ro\16")
buf.write("o\u03ec\3o\3o\3o\3o\3o\3p\3p\3p\3p\3p\3q\3q\3r\3r\3s\3")
buf.write("s\3t\3t\3u\3u\3v\3v\3w\3w\3x\3x\3y\3y\3z\3z\3z\3z\3{\3")
buf.write("{\3|\3|\3|\3}\3}\3~\3~\3\177\3\177\3\u0080\3\u0080\3\u0081")
buf.write("\3\u0081\3\u0082\3\u0082\3\u0083\3\u0083\3\u0084\3\u0084")
buf.write("\3\u0084\3\u0085\3\u0085\3\u0085\3\u0086\3\u0086\3\u0086")
buf.write("\3\u0087\3\u0087\3\u0087\3\u0088\3\u0088\3\u0088\3\u0089")
buf.write("\3\u0089\3\u0089\3\u008a\3\u008a\3\u008a\3\u008b\3\u008b")
buf.write("\3\u008b\3\u008c\3\u008c\3\u008c\3\u008d\3\u008d\3\u008e")
buf.write("\3\u008e\3\u008f\3\u008f\3\u0090\3\u0090\3\u0091\3\u0091")
buf.write("\3\u0092\3\u0092\3\u0093\3\u0093\3\u0094\3\u0094\3\u0095")
buf.write("\3\u0095\3\u0095\3\u0096\3\u0096\3\u0096\3\u0097\3\u0097")
buf.write("\3\u0097\3\u0098\3\u0098\3\u0098\3\u0099\3\u0099\3\u0099")
buf.write("\3\u009a\3\u009a\3\u009a\3\u009b\3\u009b\3\u009b\3\u009c")
buf.write("\3\u009c\3\u009c\3\u009d\3\u009d\3\u009d\3\u009d\3\u009e")
buf.write("\3\u009e\3\u009e\3\u009e\3\u009f\3\u009f\3\u009f\3\u009f")
buf.write("\3\u009f\3\u00a0\3\u00a0\7\u00a0\u0474\n\u00a0\f\u00a0")
buf.write("\16\u00a0\u0477\13\u00a0\3\u00a1\3\u00a1\3\u00a1\3\u00a1")
buf.write("\5\u00a1\u047d\n\u00a1\3\u00a2\3\u00a2\3\u00a2\3\u00a2")
buf.write("\5\u00a2\u0483\n\u00a2\3\u00a3\6\u00a3\u0486\n\u00a3\r")
buf.write("\u00a3\16\u00a3\u0487\3\u00a3\3\u00a3\3\u00a4\3\u00a4")
buf.write("\3\u00a4\3\u00a4\7\u00a4\u0490\n\u00a4\f\u00a4\16\u00a4")
buf.write("\u0493\13\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4\3\u00a4")
buf.write("\3\u00a5\3\u00a5\3\u00a5\3\u00a5\7\u00a5\u049e\n\u00a5")
buf.write("\f\u00a5\16\u00a5\u04a1\13\u00a5\3\u00a5\3\u00a5\3\u0491")
buf.write("\2\u00a6\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f")
buf.write("\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27")
buf.write("-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%")
buf.write("I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67")
buf.write("m8o9q:s;u<w=y>{?}@\177\2\u0081\2\u0083\2\u0085\2\u0087")
buf.write("\2\u0089\2\u008b\2\u008d\2\u008f\2\u0091\2\u0093\2\u0095")
buf.write("\2\u0097\2\u0099\2\u009b\2\u009d\2\u009f\2\u00a1\2\u00a3")
buf.write("\2\u00a5\2\u00a7\2\u00a9\2\u00ab\2\u00ad\2\u00af\2\u00b1")
buf.write("\2\u00b3\2\u00b5A\u00b7\2\u00b9\2\u00bb\2\u00bd\2\u00bf")
buf.write("\2\u00c1\2\u00c3\2\u00c5\2\u00c7\2\u00c9\2\u00cbB\u00cd")
buf.write("C\u00cf\2\u00d1D\u00d3\2\u00d5\2\u00d7\2\u00d9\2\u00db")
buf.write("\2\u00dd\2\u00dfE\u00e1F\u00e3G\u00e5H\u00e7I\u00e9J\u00eb")
buf.write("K\u00edL\u00efM\u00f1N\u00f3O\u00f5P\u00f7Q\u00f9R\u00fb")
buf.write("S\u00fdT\u00ffU\u0101V\u0103W\u0105X\u0107Y\u0109Z\u010b")
buf.write("[\u010d\\\u010f]\u0111^\u0113_\u0115`\u0117a\u0119b\u011b")
buf.write("c\u011dd\u011fe\u0121f\u0123g\u0125h\u0127i\u0129j\u012b")
buf.write("k\u012dl\u012fm\u0131n\u0133o\u0135p\u0137q\u0139r\u013b")
buf.write("s\u013dt\u013fu\u0141\2\u0143\2\u0145v\u0147w\u0149x\3")
buf.write("\2\30\4\2NNnn\3\2\63;\4\2ZZzz\5\2\62;CHch\3\2\629\4\2")
buf.write("DDdd\3\2\62\63\4\2GGgg\4\2--//\6\2FFHHffhh\4\2RRrr\6\2")
buf.write("\f\f\17\17))^^\6\2\f\f\17\17$$^^\n\2$$))^^ddhhppttvv\3")
buf.write("\2\62\65\6\2&&C\\aac|\4\2\2\u0081\ud802\udc01\3\2\ud802")
buf.write("\udc01\3\2\udc02\ue001\7\2&&\62;C\\aac|\5\2\13\f\16\17")
buf.write("\"\"\4\2\f\f\17\17\2\u04b3\2\3\3\2\2\2\2\5\3\2\2\2\2\7")
buf.write("\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2")
buf.write("\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2")
buf.write("\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2")
buf.write("\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3\2\2")
buf.write("\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2\63")
buf.write("\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3\2\2")
buf.write("\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2")
buf.write("\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3")
buf.write("\2\2\2\2Q\3\2\2\2\2S\3\2\2\2\2U\3\2\2\2\2W\3\2\2\2\2Y")
buf.write("\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2a\3\2\2\2\2")
buf.write("c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2")
buf.write("\2m\3\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2")
buf.write("\2\2w\3\2\2\2\2y\3\2\2\2\2{\3\2\2\2\2}\3\2\2\2\2\u00b5")
buf.write("\3\2\2\2\2\u00cb\3\2\2\2\2\u00cd\3\2\2\2\2\u00d1\3\2\2")
buf.write("\2\2\u00df\3\2\2\2\2\u00e1\3\2\2\2\2\u00e3\3\2\2\2\2\u00e5")
buf.write("\3\2\2\2\2\u00e7\3\2\2\2\2\u00e9\3\2\2\2\2\u00eb\3\2\2")
buf.write("\2\2\u00ed\3\2\2\2\2\u00ef\3\2\2\2\2\u00f1\3\2\2\2\2\u00f3")
buf.write("\3\2\2\2\2\u00f5\3\2\2\2\2\u00f7\3\2\2\2\2\u00f9\3\2\2")
buf.write("\2\2\u00fb\3\2\2\2\2\u00fd\3\2\2\2\2\u00ff\3\2\2\2\2\u0101")
buf.write("\3\2\2\2\2\u0103\3\2\2\2\2\u0105\3\2\2\2\2\u0107\3\2\2")
buf.write("\2\2\u0109\3\2\2\2\2\u010b\3\2\2\2\2\u010d\3\2\2\2\2\u010f")
buf.write("\3\2\2\2\2\u0111\3\2\2\2\2\u0113\3\2\2\2\2\u0115\3\2\2")
buf.write("\2\2\u0117\3\2\2\2\2\u0119\3\2\2\2\2\u011b\3\2\2\2\2\u011d")
buf.write("\3\2\2\2\2\u011f\3\2\2\2\2\u0121\3\2\2\2\2\u0123\3\2\2")
buf.write("\2\2\u0125\3\2\2\2\2\u0127\3\2\2\2\2\u0129\3\2\2\2\2\u012b")
buf.write("\3\2\2\2\2\u012d\3\2\2\2\2\u012f\3\2\2\2\2\u0131\3\2\2")
buf.write("\2\2\u0133\3\2\2\2\2\u0135\3\2\2\2\2\u0137\3\2\2\2\2\u0139")
buf.write("\3\2\2\2\2\u013b\3\2\2\2\2\u013d\3\2\2\2\2\u013f\3\2\2")
buf.write("\2\2\u0145\3\2\2\2\2\u0147\3\2\2\2\2\u0149\3\2\2\2\3\u014b")
buf.write("\3\2\2\2\5\u0150\3\2\2\2\7\u0157\3\2\2\2\t\u0160\3\2\2")
buf.write("\2\13\u0168\3\2\2\2\r\u016b\3\2\2\2\17\u0171\3\2\2\2\21")
buf.write("\u0176\3\2\2\2\23\u017f\3\2\2\2\25\u0184\3\2\2\2\27\u018f")
buf.write("\3\2\2\2\31\u0198\3\2\2\2\33\u019f\3\2\2\2\35\u01a7\3")
buf.write("\2\2\2\37\u01ad\3\2\2\2!\u01b2\3\2\2\2#\u01b7\3\2\2\2")
buf.write("%\u01bd\3\2\2\2\'\u01c2\3\2\2\2)\u01c8\3\2\2\2+\u01ce")
buf.write("\3\2\2\2-\u01d7\3\2\2\2/\u01df\3\2\2\2\61\u01e2\3\2\2")
buf.write("\2\63\u01e9\3\2\2\2\65\u01ee\3\2\2\2\67\u01f3\3\2\2\2")
buf.write("9\u01fb\3\2\2\2;\u0201\3\2\2\2=\u0209\3\2\2\2?\u020f\3")
buf.write("\2\2\2A\u0213\3\2\2\2C\u0216\3\2\2\2E\u021b\3\2\2\2G\u0226")
buf.write("\3\2\2\2I\u022d\3\2\2\2K\u0238\3\2\2\2M\u023c\3\2\2\2")
buf.write("O\u0246\3\2\2\2Q\u024b\3\2\2\2S\u0252\3\2\2\2U\u0256\3")
buf.write("\2\2\2W\u025e\3\2\2\2Y\u0266\3\2\2\2[\u0270\3\2\2\2]\u0277")
buf.write("\3\2\2\2_\u027e\3\2\2\2a\u0284\3\2\2\2c\u028b\3\2\2\2")
buf.write("e\u0294\3\2\2\2g\u029a\3\2\2\2i\u02a1\3\2\2\2k\u02ae\3")
buf.write("\2\2\2m\u02b3\3\2\2\2o\u02b9\3\2\2\2q\u02c0\3\2\2\2s\u02ca")
buf.write("\3\2\2\2u\u02ce\3\2\2\2w\u02d3\3\2\2\2y\u02dc\3\2\2\2")
buf.write("{\u02e2\3\2\2\2}\u02e8\3\2\2\2\177\u02ea\3\2\2\2\u0081")
buf.write("\u02ee\3\2\2\2\u0083\u02f2\3\2\2\2\u0085\u02f6\3\2\2\2")
buf.write("\u0087\u02fa\3\2\2\2\u0089\u0306\3\2\2\2\u008b\u0308\3")
buf.write("\2\2\2\u008d\u0311\3\2\2\2\u008f\u0313\3\2\2\2\u0091\u0316")
buf.write("\3\2\2\2\u0093\u031c\3\2\2\2\u0095\u031f\3\2\2\2\u0097")
buf.write("\u0323\3\2\2\2\u0099\u0327\3\2\2\2\u009b\u032e\3\2\2\2")
buf.write("\u009d\u0331\3\2\2\2\u009f\u0337\3\2\2\2\u00a1\u0339\3")
buf.write("\2\2\2\u00a3\u033f\3\2\2\2\u00a5\u0346\3\2\2\2\u00a7\u0349")
buf.write("\3\2\2\2\u00a9\u034f\3\2\2\2\u00ab\u0351\3\2\2\2\u00ad")
buf.write("\u0355\3\2\2\2\u00af\u035c\3\2\2\2\u00b1\u035f\3\2\2\2")
buf.write("\u00b3\u0365\3\2\2\2\u00b5\u0369\3\2\2\2\u00b7\u0386\3")
buf.write("\2\2\2\u00b9\u0388\3\2\2\2\u00bb\u038b\3\2\2\2\u00bd\u038e")
buf.write("\3\2\2\2\u00bf\u0392\3\2\2\2\u00c1\u0394\3\2\2\2\u00c3")
buf.write("\u0396\3\2\2\2\u00c5\u03a6\3\2\2\2\u00c7\u03a8\3\2\2\2")
buf.write("\u00c9\u03ab\3\2\2\2\u00cb\u03b6\3\2\2\2\u00cd\u03c0\3")
buf.write("\2\2\2\u00cf\u03c2\3\2\2\2\u00d1\u03c4\3\2\2\2\u00d3\u03cb")
buf.write("\3\2\2\2\u00d5\u03d1\3\2\2\2\u00d7\u03d7\3\2\2\2\u00d9")
buf.write("\u03e4\3\2\2\2\u00db\u03e6\3\2\2\2\u00dd\u03e8\3\2\2\2")
buf.write("\u00df\u03f3\3\2\2\2\u00e1\u03f8\3\2\2\2\u00e3\u03fa\3")
buf.write("\2\2\2\u00e5\u03fc\3\2\2\2\u00e7\u03fe\3\2\2\2\u00e9\u0400")
buf.write("\3\2\2\2\u00eb\u0402\3\2\2\2\u00ed\u0404\3\2\2\2\u00ef")
buf.write("\u0406\3\2\2\2\u00f1\u0408\3\2\2\2\u00f3\u040a\3\2\2\2")
buf.write("\u00f5\u040e\3\2\2\2\u00f7\u0410\3\2\2\2\u00f9\u0413\3")
buf.write("\2\2\2\u00fb\u0415\3\2\2\2\u00fd\u0417\3\2\2\2\u00ff\u0419")
buf.write("\3\2\2\2\u0101\u041b\3\2\2\2\u0103\u041d\3\2\2\2\u0105")
buf.write("\u041f\3\2\2\2\u0107\u0421\3\2\2\2\u0109\u0424\3\2\2\2")
buf.write("\u010b\u0427\3\2\2\2\u010d\u042a\3\2\2\2\u010f\u042d\3")
buf.write("\2\2\2\u0111\u0430\3\2\2\2\u0113\u0433\3\2\2\2\u0115\u0436")
buf.write("\3\2\2\2\u0117\u0439\3\2\2\2\u0119\u043c\3\2\2\2\u011b")
buf.write("\u043e\3\2\2\2\u011d\u0440\3\2\2\2\u011f\u0442\3\2\2\2")
buf.write("\u0121\u0444\3\2\2\2\u0123\u0446\3\2\2\2\u0125\u0448\3")
buf.write("\2\2\2\u0127\u044a\3\2\2\2\u0129\u044c\3\2\2\2\u012b\u044f")
buf.write("\3\2\2\2\u012d\u0452\3\2\2\2\u012f\u0455\3\2\2\2\u0131")
buf.write("\u0458\3\2\2\2\u0133\u045b\3\2\2\2\u0135\u045e\3\2\2\2")
buf.write("\u0137\u0461\3\2\2\2\u0139\u0464\3\2\2\2\u013b\u0468\3")
buf.write("\2\2\2\u013d\u046c\3\2\2\2\u013f\u0471\3\2\2\2\u0141\u047c")
buf.write("\3\2\2\2\u0143\u0482\3\2\2\2\u0145\u0485\3\2\2\2\u0147")
buf.write("\u048b\3\2\2\2\u0149\u0499\3\2\2\2\u014b\u014c\7q\2\2")
buf.write("\u014c\u014d\7r\2\2\u014d\u014e\7g\2\2\u014e\u014f\7p")
buf.write("\2\2\u014f\4\3\2\2\2\u0150\u0151\7o\2\2\u0151\u0152\7")
buf.write("q\2\2\u0152\u0153\7f\2\2\u0153\u0154\7w\2\2\u0154\u0155")
buf.write("\7n\2\2\u0155\u0156\7g\2\2\u0156\6\3\2\2\2\u0157\u0158")
buf.write("\7t\2\2\u0158\u0159\7g\2\2\u0159\u015a\7s\2\2\u015a\u015b")
buf.write("\7w\2\2\u015b\u015c\7k\2\2\u015c\u015d\7t\2\2\u015d\u015e")
buf.write("\7g\2\2\u015e\u015f\7u\2\2\u015f\b\3\2\2\2\u0160\u0161")
buf.write("\7g\2\2\u0161\u0162\7z\2\2\u0162\u0163\7r\2\2\u0163\u0164")
buf.write("\7q\2\2\u0164\u0165\7t\2\2\u0165\u0166\7v\2\2\u0166\u0167")
buf.write("\7u\2\2\u0167\n\3\2\2\2\u0168\u0169\7v\2\2\u0169\u016a")
buf.write("\7q\2\2\u016a\f\3\2\2\2\u016b\u016c\7q\2\2\u016c\u016d")
buf.write("\7r\2\2\u016d\u016e\7g\2\2\u016e\u016f\7p\2\2\u016f\u0170")
buf.write("\7u\2\2\u0170\16\3\2\2\2\u0171\u0172\7w\2\2\u0172\u0173")
buf.write("\7u\2\2\u0173\u0174\7g\2\2\u0174\u0175\7u\2\2\u0175\20")
buf.write("\3\2\2\2\u0176\u0177\7r\2\2\u0177\u0178\7t\2\2\u0178\u0179")
buf.write("\7q\2\2\u0179\u017a\7x\2\2\u017a\u017b\7k\2\2\u017b\u017c")
buf.write("\7f\2\2\u017c\u017d\7g\2\2\u017d\u017e\7u\2\2\u017e\22")
buf.write("\3\2\2\2\u017f\u0180\7y\2\2\u0180\u0181\7k\2\2\u0181\u0182")
buf.write("\7v\2\2\u0182\u0183\7j\2\2\u0183\24\3\2\2\2\u0184\u0185")
buf.write("\7v\2\2\u0185\u0186\7t\2\2\u0186\u0187\7c\2\2\u0187\u0188")
buf.write("\7p\2\2\u0188\u0189\7u\2\2\u0189\u018a\7k\2\2\u018a\u018b")
buf.write("\7v\2\2\u018b\u018c\7k\2\2\u018c\u018d\7x\2\2\u018d\u018e")
buf.write("\7g\2\2\u018e\26\3\2\2\2\u018f\u0190\7c\2\2\u0190\u0191")
buf.write("\7d\2\2\u0191\u0192\7u\2\2\u0192\u0193\7v\2\2\u0193\u0194")
buf.write("\7t\2\2\u0194\u0195\7c\2\2\u0195\u0196\7e\2\2\u0196\u0197")
buf.write("\7v\2\2\u0197\30\3\2\2\2\u0198\u0199\7c\2\2\u0199\u019a")
buf.write("\7u\2\2\u019a\u019b\7u\2\2\u019b\u019c\7g\2\2\u019c\u019d")
buf.write("\7t\2\2\u019d\u019e\7v\2\2\u019e\32\3\2\2\2\u019f\u01a0")
buf.write("\7d\2\2\u01a0\u01a1\7q\2\2\u01a1\u01a2\7q\2\2\u01a2\u01a3")
buf.write("\7n\2\2\u01a3\u01a4\7g\2\2\u01a4\u01a5\7c\2\2\u01a5\u01a6")
buf.write("\7p\2\2\u01a6\34\3\2\2\2\u01a7\u01a8\7d\2\2\u01a8\u01a9")
buf.write("\7t\2\2\u01a9\u01aa\7g\2\2\u01aa\u01ab\7c\2\2\u01ab\u01ac")
buf.write("\7m\2\2\u01ac\36\3\2\2\2\u01ad\u01ae\7d\2\2\u01ae\u01af")
buf.write("\7{\2\2\u01af\u01b0\7v\2\2\u01b0\u01b1\7g\2\2\u01b1 \3")
buf.write("\2\2\2\u01b2\u01b3\7e\2\2\u01b3\u01b4\7c\2\2\u01b4\u01b5")
buf.write("\7u\2\2\u01b5\u01b6\7g\2\2\u01b6\"\3\2\2\2\u01b7\u01b8")
buf.write("\7e\2\2\u01b8\u01b9\7c\2\2\u01b9\u01ba\7v\2\2\u01ba\u01bb")
buf.write("\7e\2\2\u01bb\u01bc\7j\2\2\u01bc$\3\2\2\2\u01bd\u01be")
buf.write("\7e\2\2\u01be\u01bf\7j\2\2\u01bf\u01c0\7c\2\2\u01c0\u01c1")
buf.write("\7t\2\2\u01c1&\3\2\2\2\u01c2\u01c3\7e\2\2\u01c3\u01c4")
buf.write("\7n\2\2\u01c4\u01c5\7c\2\2\u01c5\u01c6\7u\2\2\u01c6\u01c7")
buf.write("\7u\2\2\u01c7(\3\2\2\2\u01c8\u01c9\7e\2\2\u01c9\u01ca")
buf.write("\7q\2\2\u01ca\u01cb\7p\2\2\u01cb\u01cc\7u\2\2\u01cc\u01cd")
buf.write("\7v\2\2\u01cd*\3\2\2\2\u01ce\u01cf\7e\2\2\u01cf\u01d0")
buf.write("\7q\2\2\u01d0\u01d1\7p\2\2\u01d1\u01d2\7v\2\2\u01d2\u01d3")
buf.write("\7k\2\2\u01d3\u01d4\7p\2\2\u01d4\u01d5\7w\2\2\u01d5\u01d6")
buf.write("\7g\2\2\u01d6,\3\2\2\2\u01d7\u01d8\7f\2\2\u01d8\u01d9")
buf.write("\7g\2\2\u01d9\u01da\7h\2\2\u01da\u01db\7c\2\2\u01db\u01dc")
buf.write("\7w\2\2\u01dc\u01dd\7n\2\2\u01dd\u01de\7v\2\2\u01de.\3")
buf.write("\2\2\2\u01df\u01e0\7f\2\2\u01e0\u01e1\7q\2\2\u01e1\60")
buf.write("\3\2\2\2\u01e2\u01e3\7f\2\2\u01e3\u01e4\7q\2\2\u01e4\u01e5")
buf.write("\7w\2\2\u01e5\u01e6\7d\2\2\u01e6\u01e7\7n\2\2\u01e7\u01e8")
buf.write("\7g\2\2\u01e8\62\3\2\2\2\u01e9\u01ea\7g\2\2\u01ea\u01eb")
buf.write("\7n\2\2\u01eb\u01ec\7u\2\2\u01ec\u01ed\7g\2\2\u01ed\64")
buf.write("\3\2\2\2\u01ee\u01ef\7g\2\2\u01ef\u01f0\7p\2\2\u01f0\u01f1")
buf.write("\7w\2\2\u01f1\u01f2\7o\2\2\u01f2\66\3\2\2\2\u01f3\u01f4")
buf.write("\7g\2\2\u01f4\u01f5\7z\2\2\u01f5\u01f6\7v\2\2\u01f6\u01f7")
buf.write("\7g\2\2\u01f7\u01f8\7p\2\2\u01f8\u01f9\7f\2\2\u01f9\u01fa")
buf.write("\7u\2\2\u01fa8\3\2\2\2\u01fb\u01fc\7h\2\2\u01fc\u01fd")
buf.write("\7k\2\2\u01fd\u01fe\7p\2\2\u01fe\u01ff\7c\2\2\u01ff\u0200")
buf.write("\7n\2\2\u0200:\3\2\2\2\u0201\u0202\7h\2\2\u0202\u0203")
buf.write("\7k\2\2\u0203\u0204\7p\2\2\u0204\u0205\7c\2\2\u0205\u0206")
buf.write("\7n\2\2\u0206\u0207\7n\2\2\u0207\u0208\7{\2\2\u0208<\3")
buf.write("\2\2\2\u0209\u020a\7h\2\2\u020a\u020b\7n\2\2\u020b\u020c")
buf.write("\7q\2\2\u020c\u020d\7c\2\2\u020d\u020e\7v\2\2\u020e>\3")
buf.write("\2\2\2\u020f\u0210\7h\2\2\u0210\u0211\7q\2\2\u0211\u0212")
buf.write("\7t\2\2\u0212@\3\2\2\2\u0213\u0214\7k\2\2\u0214\u0215")
buf.write("\7h\2\2\u0215B\3\2\2\2\u0216\u0217\7i\2\2\u0217\u0218")
buf.write("\7q\2\2\u0218\u0219\7v\2\2\u0219\u021a\7q\2\2\u021aD\3")
buf.write("\2\2\2\u021b\u021c\7k\2\2\u021c\u021d\7o\2\2\u021d\u021e")
buf.write("\7r\2\2\u021e\u021f\7n\2\2\u021f\u0220\7g\2\2\u0220\u0221")
buf.write("\7o\2\2\u0221\u0222\7g\2\2\u0222\u0223\7p\2\2\u0223\u0224")
buf.write("\7v\2\2\u0224\u0225\7u\2\2\u0225F\3\2\2\2\u0226\u0227")
buf.write("\7k\2\2\u0227\u0228\7o\2\2\u0228\u0229\7r\2\2\u0229\u022a")
buf.write("\7q\2\2\u022a\u022b\7t\2\2\u022b\u022c\7v\2\2\u022cH\3")
buf.write("\2\2\2\u022d\u022e\7k\2\2\u022e\u022f\7p\2\2\u022f\u0230")
buf.write("\7u\2\2\u0230\u0231\7v\2\2\u0231\u0232\7c\2\2\u0232\u0233")
buf.write("\7p\2\2\u0233\u0234\7e\2\2\u0234\u0235\7g\2\2\u0235\u0236")
buf.write("\7q\2\2\u0236\u0237\7h\2\2\u0237J\3\2\2\2\u0238\u0239")
buf.write("\7k\2\2\u0239\u023a\7p\2\2\u023a\u023b\7v\2\2\u023bL\3")
buf.write("\2\2\2\u023c\u023d\7k\2\2\u023d\u023e\7p\2\2\u023e\u023f")
buf.write("\7v\2\2\u023f\u0240\7g\2\2\u0240\u0241\7t\2\2\u0241\u0242")
buf.write("\7h\2\2\u0242\u0243\7c\2\2\u0243\u0244\7e\2\2\u0244\u0245")
buf.write("\7g\2\2\u0245N\3\2\2\2\u0246\u0247\7n\2\2\u0247\u0248")
buf.write("\7q\2\2\u0248\u0249\7p\2\2\u0249\u024a\7i\2\2\u024aP\3")
buf.write("\2\2\2\u024b\u024c\7p\2\2\u024c\u024d\7c\2\2\u024d\u024e")
buf.write("\7v\2\2\u024e\u024f\7k\2\2\u024f\u0250\7x\2\2\u0250\u0251")
buf.write("\7g\2\2\u0251R\3\2\2\2\u0252\u0253\7p\2\2\u0253\u0254")
buf.write("\7g\2\2\u0254\u0255\7y\2\2\u0255T\3\2\2\2\u0256\u0257")
buf.write("\7r\2\2\u0257\u0258\7c\2\2\u0258\u0259\7e\2\2\u0259\u025a")
buf.write("\7m\2\2\u025a\u025b\7c\2\2\u025b\u025c\7i\2\2\u025c\u025d")
buf.write("\7g\2\2\u025dV\3\2\2\2\u025e\u025f\7r\2\2\u025f\u0260")
buf.write("\7t\2\2\u0260\u0261\7k\2\2\u0261\u0262\7x\2\2\u0262\u0263")
buf.write("\7c\2\2\u0263\u0264\7v\2\2\u0264\u0265\7g\2\2\u0265X\3")
buf.write("\2\2\2\u0266\u0267\7r\2\2\u0267\u0268\7t\2\2\u0268\u0269")
buf.write("\7q\2\2\u0269\u026a\7v\2\2\u026a\u026b\7g\2\2\u026b\u026c")
buf.write("\7e\2\2\u026c\u026d\7v\2\2\u026d\u026e\7g\2\2\u026e\u026f")
buf.write("\7f\2\2\u026fZ\3\2\2\2\u0270\u0271\7r\2\2\u0271\u0272")
buf.write("\7w\2\2\u0272\u0273\7d\2\2\u0273\u0274\7n\2\2\u0274\u0275")
buf.write("\7k\2\2\u0275\u0276\7e\2\2\u0276\\\3\2\2\2\u0277\u0278")
buf.write("\7t\2\2\u0278\u0279\7g\2\2\u0279\u027a\7v\2\2\u027a\u027b")
buf.write("\7w\2\2\u027b\u027c\7t\2\2\u027c\u027d\7p\2\2\u027d^\3")
buf.write("\2\2\2\u027e\u027f\7u\2\2\u027f\u0280\7j\2\2\u0280\u0281")
buf.write("\7q\2\2\u0281\u0282\7t\2\2\u0282\u0283\7v\2\2\u0283`\3")
buf.write("\2\2\2\u0284\u0285\7u\2\2\u0285\u0286\7v\2\2\u0286\u0287")
buf.write("\7c\2\2\u0287\u0288\7v\2\2\u0288\u0289\7k\2\2\u0289\u028a")
buf.write("\7e\2\2\u028ab\3\2\2\2\u028b\u028c\7u\2\2\u028c\u028d")
buf.write("\7v\2\2\u028d\u028e\7t\2\2\u028e\u028f\7k\2\2\u028f\u0290")
buf.write("\7e\2\2\u0290\u0291\7v\2\2\u0291\u0292\7h\2\2\u0292\u0293")
buf.write("\7r\2\2\u0293d\3\2\2\2\u0294\u0295\7u\2\2\u0295\u0296")
buf.write("\7w\2\2\u0296\u0297\7r\2\2\u0297\u0298\7g\2\2\u0298\u0299")
buf.write("\7t\2\2\u0299f\3\2\2\2\u029a\u029b\7u\2\2\u029b\u029c")
buf.write("\7y\2\2\u029c\u029d\7k\2\2\u029d\u029e\7v\2\2\u029e\u029f")
buf.write("\7e\2\2\u029f\u02a0\7j\2\2\u02a0h\3\2\2\2\u02a1\u02a2")
buf.write("\7u\2\2\u02a2\u02a3\7{\2\2\u02a3\u02a4\7p\2\2\u02a4\u02a5")
buf.write("\7e\2\2\u02a5\u02a6\7j\2\2\u02a6\u02a7\7t\2\2\u02a7\u02a8")
buf.write("\7q\2\2\u02a8\u02a9\7p\2\2\u02a9\u02aa\7k\2\2\u02aa\u02ab")
buf.write("\7|\2\2\u02ab\u02ac\7g\2\2\u02ac\u02ad\7f\2\2\u02adj\3")
buf.write("\2\2\2\u02ae\u02af\7v\2\2\u02af\u02b0\7j\2\2\u02b0\u02b1")
buf.write("\7k\2\2\u02b1\u02b2\7u\2\2\u02b2l\3\2\2\2\u02b3\u02b4")
buf.write("\7v\2\2\u02b4\u02b5\7j\2\2\u02b5\u02b6\7t\2\2\u02b6\u02b7")
buf.write("\7q\2\2\u02b7\u02b8\7y\2\2\u02b8n\3\2\2\2\u02b9\u02ba")
buf.write("\7v\2\2\u02ba\u02bb\7j\2\2\u02bb\u02bc\7t\2\2\u02bc\u02bd")
buf.write("\7q\2\2\u02bd\u02be\7y\2\2\u02be\u02bf\7u\2\2\u02bfp\3")
buf.write("\2\2\2\u02c0\u02c1\7v\2\2\u02c1\u02c2\7t\2\2\u02c2\u02c3")
buf.write("\7c\2\2\u02c3\u02c4\7p\2\2\u02c4\u02c5\7u\2\2\u02c5\u02c6")
buf.write("\7k\2\2\u02c6\u02c7\7g\2\2\u02c7\u02c8\7p\2\2\u02c8\u02c9")
buf.write("\7v\2\2\u02c9r\3\2\2\2\u02ca\u02cb\7v\2\2\u02cb\u02cc")
buf.write("\7t\2\2\u02cc\u02cd\7{\2\2\u02cdt\3\2\2\2\u02ce\u02cf")
buf.write("\7x\2\2\u02cf\u02d0\7q\2\2\u02d0\u02d1\7k\2\2\u02d1\u02d2")
buf.write("\7f\2\2\u02d2v\3\2\2\2\u02d3\u02d4\7x\2\2\u02d4\u02d5")
buf.write("\7q\2\2\u02d5\u02d6\7n\2\2\u02d6\u02d7\7c\2\2\u02d7\u02d8")
buf.write("\7v\2\2\u02d8\u02d9\7k\2\2\u02d9\u02da\7n\2\2\u02da\u02db")
buf.write("\7g\2\2\u02dbx\3\2\2\2\u02dc\u02dd\7y\2\2\u02dd\u02de")
buf.write("\7j\2\2\u02de\u02df\7k\2\2\u02df\u02e0\7n\2\2\u02e0\u02e1")
buf.write("\7g\2\2\u02e1z\3\2\2\2\u02e2\u02e3\7a\2\2\u02e3|\3\2\2")
buf.write("\2\u02e4\u02e9\5\177@\2\u02e5\u02e9\5\u0081A\2\u02e6\u02e9")
buf.write("\5\u0083B\2\u02e7\u02e9\5\u0085C\2\u02e8\u02e4\3\2\2\2")
buf.write("\u02e8\u02e5\3\2\2\2\u02e8\u02e6\3\2\2\2\u02e8\u02e7\3")
buf.write("\2\2\2\u02e9~\3\2\2\2\u02ea\u02ec\5\u0089E\2\u02eb\u02ed")
buf.write("\5\u0087D\2\u02ec\u02eb\3\2\2\2\u02ec\u02ed\3\2\2\2\u02ed")
buf.write("\u0080\3\2\2\2\u02ee\u02f0\5\u0097L\2\u02ef\u02f1\5\u0087")
buf.write("D\2\u02f0\u02ef\3\2\2\2\u02f0\u02f1\3\2\2\2\u02f1\u0082")
buf.write("\3\2\2\2\u02f2\u02f4\5\u00a1Q\2\u02f3\u02f5\5\u0087D\2")
buf.write("\u02f4\u02f3\3\2\2\2\u02f4\u02f5\3\2\2\2\u02f5\u0084\3")
buf.write("\2\2\2\u02f6\u02f8\5\u00abV\2\u02f7\u02f9\5\u0087D\2\u02f8")
buf.write("\u02f7\3\2\2\2\u02f8\u02f9\3\2\2\2\u02f9\u0086\3\2\2\2")
buf.write("\u02fa\u02fb\t\2\2\2\u02fb\u0088\3\2\2\2\u02fc\u0307\7")
buf.write("\62\2\2\u02fd\u0304\5\u008fH\2\u02fe\u0300\5\u008bF\2")
buf.write("\u02ff\u02fe\3\2\2\2\u02ff\u0300\3\2\2\2\u0300\u0305\3")
buf.write("\2\2\2\u0301\u0302\5\u0095K\2\u0302\u0303\5\u008bF\2\u0303")
buf.write("\u0305\3\2\2\2\u0304\u02ff\3\2\2\2\u0304\u0301\3\2\2\2")
buf.write("\u0305\u0307\3\2\2\2\u0306\u02fc\3\2\2\2\u0306\u02fd\3")
buf.write("\2\2\2\u0307\u008a\3\2\2\2\u0308\u030d\5\u008dG\2\u0309")
buf.write("\u030b\5\u0091I\2\u030a\u0309\3\2\2\2\u030a\u030b\3\2")
buf.write("\2\2\u030b\u030c\3\2\2\2\u030c\u030e\5\u008dG\2\u030d")
buf.write("\u030a\3\2\2\2\u030d\u030e\3\2\2\2\u030e\u008c\3\2\2\2")
buf.write("\u030f\u0312\7\62\2\2\u0310\u0312\5\u008fH\2\u0311\u030f")
buf.write("\3\2\2\2\u0311\u0310\3\2\2\2\u0312\u008e\3\2\2\2\u0313")
buf.write("\u0314\t\3\2\2\u0314\u0090\3\2\2\2\u0315\u0317\5\u0093")
buf.write("J\2\u0316\u0315\3\2\2\2\u0317\u0318\3\2\2\2\u0318\u0316")
buf.write("\3\2\2\2\u0318\u0319\3\2\2\2\u0319\u0092\3\2\2\2\u031a")
buf.write("\u031d\5\u008dG\2\u031b\u031d\7a\2\2\u031c\u031a\3\2\2")
buf.write("\2\u031c\u031b\3\2\2\2\u031d\u0094\3\2\2\2\u031e\u0320")
buf.write("\7a\2\2\u031f\u031e\3\2\2\2\u0320\u0321\3\2\2\2\u0321")
buf.write("\u031f\3\2\2\2\u0321\u0322\3\2\2\2\u0322\u0096\3\2\2\2")
buf.write("\u0323\u0324\7\62\2\2\u0324\u0325\t\4\2\2\u0325\u0326")
buf.write("\5\u0099M\2\u0326\u0098\3\2\2\2\u0327\u032c\5\u009bN\2")
buf.write("\u0328\u032a\5\u009dO\2\u0329\u0328\3\2\2\2\u0329\u032a")
buf.write("\3\2\2\2\u032a\u032b\3\2\2\2\u032b\u032d\5\u009bN\2\u032c")
buf.write("\u0329\3\2\2\2\u032c\u032d\3\2\2\2\u032d\u009a\3\2\2\2")
buf.write("\u032e\u032f\t\5\2\2\u032f\u009c\3\2\2\2\u0330\u0332\5")
buf.write("\u009fP\2\u0331\u0330\3\2\2\2\u0332\u0333\3\2\2\2\u0333")
buf.write("\u0331\3\2\2\2\u0333\u0334\3\2\2\2\u0334\u009e\3\2\2\2")
buf.write("\u0335\u0338\5\u009bN\2\u0336\u0338\7a\2\2\u0337\u0335")
buf.write("\3\2\2\2\u0337\u0336\3\2\2\2\u0338\u00a0\3\2\2\2\u0339")
buf.write("\u033b\7\62\2\2\u033a\u033c\5\u0095K\2\u033b\u033a\3\2")
buf.write("\2\2\u033b\u033c\3\2\2\2\u033c\u033d\3\2\2\2\u033d\u033e")
buf.write("\5\u00a3R\2\u033e\u00a2\3\2\2\2\u033f\u0344\5\u00a5S\2")
buf.write("\u0340\u0342\5\u00a7T\2\u0341\u0340\3\2\2\2\u0341\u0342")
buf.write("\3\2\2\2\u0342\u0343\3\2\2\2\u0343\u0345\5\u00a5S\2\u0344")
buf.write("\u0341\3\2\2\2\u0344\u0345\3\2\2\2\u0345\u00a4\3\2\2\2")
buf.write("\u0346\u0347\t\6\2\2\u0347\u00a6\3\2\2\2\u0348\u034a\5")
buf.write("\u00a9U\2\u0349\u0348\3\2\2\2\u034a\u034b\3\2\2\2\u034b")
buf.write("\u0349\3\2\2\2\u034b\u034c\3\2\2\2\u034c\u00a8\3\2\2\2")
buf.write("\u034d\u0350\5\u00a5S\2\u034e\u0350\7a\2\2\u034f\u034d")
buf.write("\3\2\2\2\u034f\u034e\3\2\2\2\u0350\u00aa\3\2\2\2\u0351")
buf.write("\u0352\7\62\2\2\u0352\u0353\t\7\2\2\u0353\u0354\5\u00ad")
buf.write("W\2\u0354\u00ac\3\2\2\2\u0355\u035a\5\u00afX\2\u0356\u0358")
buf.write("\5\u00b1Y\2\u0357\u0356\3\2\2\2\u0357\u0358\3\2\2\2\u0358")
buf.write("\u0359\3\2\2\2\u0359\u035b\5\u00afX\2\u035a\u0357\3\2")
buf.write("\2\2\u035a\u035b\3\2\2\2\u035b\u00ae\3\2\2\2\u035c\u035d")
buf.write("\t\b\2\2\u035d\u00b0\3\2\2\2\u035e\u0360\5\u00b3Z\2\u035f")
buf.write("\u035e\3\2\2\2\u0360\u0361\3\2\2\2\u0361\u035f\3\2\2\2")
buf.write("\u0361\u0362\3\2\2\2\u0362\u00b2\3\2\2\2\u0363\u0366\5")
buf.write("\u00afX\2\u0364\u0366\7a\2\2\u0365\u0363\3\2\2\2\u0365")
buf.write("\u0364\3\2\2\2\u0366\u00b4\3\2\2\2\u0367\u036a\5\u00b7")
buf.write("\\\2\u0368\u036a\5\u00c3b\2\u0369\u0367\3\2\2\2\u0369")
buf.write("\u0368\3\2\2\2\u036a\u00b6\3\2\2\2\u036b\u036c\5\u008b")
buf.write("F\2\u036c\u036e\7\60\2\2\u036d\u036f\5\u008bF\2\u036e")
buf.write("\u036d\3\2\2\2\u036e\u036f\3\2\2\2\u036f\u0371\3\2\2\2")
buf.write("\u0370\u0372\5\u00b9]\2\u0371\u0370\3\2\2\2\u0371\u0372")
buf.write("\3\2\2\2\u0372\u0374\3\2\2\2\u0373\u0375\5\u00c1a\2\u0374")
buf.write("\u0373\3\2\2\2\u0374\u0375\3\2\2\2\u0375\u0387\3\2\2\2")
buf.write("\u0376\u0377\7\60\2\2\u0377\u0379\5\u008bF\2\u0378\u037a")
buf.write("\5\u00b9]\2\u0379\u0378\3\2\2\2\u0379\u037a\3\2\2\2\u037a")
buf.write("\u037c\3\2\2\2\u037b\u037d\5\u00c1a\2\u037c\u037b\3\2")
buf.write("\2\2\u037c\u037d\3\2\2\2\u037d\u0387\3\2\2\2\u037e\u037f")
buf.write("\5\u008bF\2\u037f\u0381\5\u00b9]\2\u0380\u0382\5\u00c1")
buf.write("a\2\u0381\u0380\3\2\2\2\u0381\u0382\3\2\2\2\u0382\u0387")
buf.write("\3\2\2\2\u0383\u0384\5\u008bF\2\u0384\u0385\5\u00c1a\2")
buf.write("\u0385\u0387\3\2\2\2\u0386\u036b\3\2\2\2\u0386\u0376\3")
buf.write("\2\2\2\u0386\u037e\3\2\2\2\u0386\u0383\3\2\2\2\u0387\u00b8")
buf.write("\3\2\2\2\u0388\u0389\5\u00bb^\2\u0389\u038a\5\u00bd_\2")
buf.write("\u038a\u00ba\3\2\2\2\u038b\u038c\t\t\2\2\u038c\u00bc\3")
buf.write("\2\2\2\u038d\u038f\5\u00bf`\2\u038e\u038d\3\2\2\2\u038e")
buf.write("\u038f\3\2\2\2\u038f\u0390\3\2\2\2\u0390\u0391\5\u008b")
buf.write("F\2\u0391\u00be\3\2\2\2\u0392\u0393\t\n\2\2\u0393\u00c0")
buf.write("\3\2\2\2\u0394\u0395\t\13\2\2\u0395\u00c2\3\2\2\2\u0396")
buf.write("\u0397\5\u00c5c\2\u0397\u0399\5\u00c7d\2\u0398\u039a\5")
buf.write("\u00c1a\2\u0399\u0398\3\2\2\2\u0399\u039a\3\2\2\2\u039a")
buf.write("\u00c4\3\2\2\2\u039b\u039d\5\u0097L\2\u039c\u039e\7\60")
buf.write("\2\2\u039d\u039c\3\2\2\2\u039d\u039e\3\2\2\2\u039e\u03a7")
buf.write("\3\2\2\2\u039f\u03a0\7\62\2\2\u03a0\u03a2\t\4\2\2\u03a1")
buf.write("\u03a3\5\u0099M\2\u03a2\u03a1\3\2\2\2\u03a2\u03a3\3\2")
buf.write("\2\2\u03a3\u03a4\3\2\2\2\u03a4\u03a5\7\60\2\2\u03a5\u03a7")
buf.write("\5\u0099M\2\u03a6\u039b\3\2\2\2\u03a6\u039f\3\2\2\2\u03a7")
buf.write("\u00c6\3\2\2\2\u03a8\u03a9\5\u00c9e\2\u03a9\u03aa\5\u00bd")
buf.write("_\2\u03aa\u00c8\3\2\2\2\u03ab\u03ac\t\f\2\2\u03ac\u00ca")
buf.write("\3\2\2\2\u03ad\u03ae\7v\2\2\u03ae\u03af\7t\2\2\u03af\u03b0")
buf.write("\7w\2\2\u03b0\u03b7\7g\2\2\u03b1\u03b2\7h\2\2\u03b2\u03b3")
buf.write("\7c\2\2\u03b3\u03b4\7n\2\2\u03b4\u03b5\7u\2\2\u03b5\u03b7")
buf.write("\7g\2\2\u03b6\u03ad\3\2\2\2\u03b6\u03b1\3\2\2\2\u03b7")
buf.write("\u00cc\3\2\2\2\u03b8\u03b9\7)\2\2\u03b9\u03ba\5\u00cf")
buf.write("h\2\u03ba\u03bb\7)\2\2\u03bb\u03c1\3\2\2\2\u03bc\u03bd")
buf.write("\7)\2\2\u03bd\u03be\5\u00d7l\2\u03be\u03bf\7)\2\2\u03bf")
buf.write("\u03c1\3\2\2\2\u03c0\u03b8\3\2\2\2\u03c0\u03bc\3\2\2\2")
buf.write("\u03c1\u00ce\3\2\2\2\u03c2\u03c3\n\r\2\2\u03c3\u00d0\3")
buf.write("\2\2\2\u03c4\u03c6\7$\2\2\u03c5\u03c7\5\u00d3j\2\u03c6")
buf.write("\u03c5\3\2\2\2\u03c6\u03c7\3\2\2\2\u03c7\u03c8\3\2\2\2")
buf.write("\u03c8\u03c9\7$\2\2\u03c9\u00d2\3\2\2\2\u03ca\u03cc\5")
buf.write("\u00d5k\2\u03cb\u03ca\3\2\2\2\u03cc\u03cd\3\2\2\2\u03cd")
buf.write("\u03cb\3\2\2\2\u03cd\u03ce\3\2\2\2\u03ce\u00d4\3\2\2\2")
buf.write("\u03cf\u03d2\n\16\2\2\u03d0\u03d2\5\u00d7l\2\u03d1\u03cf")
buf.write("\3\2\2\2\u03d1\u03d0\3\2\2\2\u03d2\u00d6\3\2\2\2\u03d3")
buf.write("\u03d4\7^\2\2\u03d4\u03d8\t\17\2\2\u03d5\u03d8\5\u00d9")
buf.write("m\2\u03d6\u03d8\5\u00ddo\2\u03d7\u03d3\3\2\2\2\u03d7\u03d5")
buf.write("\3\2\2\2\u03d7\u03d6\3\2\2\2\u03d8\u00d8\3\2\2\2\u03d9")
buf.write("\u03da\7^\2\2\u03da\u03e5\5\u00a5S\2\u03db\u03dc\7^\2")
buf.write("\2\u03dc\u03dd\5\u00a5S\2\u03dd\u03de\5\u00a5S\2\u03de")
buf.write("\u03e5\3\2\2\2\u03df\u03e0\7^\2\2\u03e0\u03e1\5\u00db")
buf.write("n\2\u03e1\u03e2\5\u00a5S\2\u03e2\u03e3\5\u00a5S\2\u03e3")
buf.write("\u03e5\3\2\2\2\u03e4\u03d9\3\2\2\2\u03e4\u03db\3\2\2\2")
buf.write("\u03e4\u03df\3\2\2\2\u03e5\u00da\3\2\2\2\u03e6\u03e7\t")
buf.write("\20\2\2\u03e7\u00dc\3\2\2\2\u03e8\u03ea\7^\2\2\u03e9\u03eb")
buf.write("\7w\2\2\u03ea\u03e9\3\2\2\2\u03eb\u03ec\3\2\2\2\u03ec")
buf.write("\u03ea\3\2\2\2\u03ec\u03ed\3\2\2\2\u03ed\u03ee\3\2\2\2")
buf.write("\u03ee\u03ef\5\u009bN\2\u03ef\u03f0\5\u009bN\2\u03f0\u03f1")
buf.write("\5\u009bN\2\u03f1\u03f2\5\u009bN\2\u03f2\u00de\3\2\2\2")
buf.write("\u03f3\u03f4\7p\2\2\u03f4\u03f5\7w\2\2\u03f5\u03f6\7n")
buf.write("\2\2\u03f6\u03f7\7n\2\2\u03f7\u00e0\3\2\2\2\u03f8\u03f9")
buf.write("\7*\2\2\u03f9\u00e2\3\2\2\2\u03fa\u03fb\7+\2\2\u03fb\u00e4")
buf.write("\3\2\2\2\u03fc\u03fd\7}\2\2\u03fd\u00e6\3\2\2\2\u03fe")
buf.write("\u03ff\7\177\2\2\u03ff\u00e8\3\2\2\2\u0400\u0401\7]\2")
buf.write("\2\u0401\u00ea\3\2\2\2\u0402\u0403\7_\2\2\u0403\u00ec")
buf.write("\3\2\2\2\u0404\u0405\7=\2\2\u0405\u00ee\3\2\2\2\u0406")
buf.write("\u0407\7.\2\2\u0407\u00f0\3\2\2\2\u0408\u0409\7\60\2\2")
buf.write("\u0409\u00f2\3\2\2\2\u040a\u040b\7\60\2\2\u040b\u040c")
buf.write("\7\60\2\2\u040c\u040d\7\60\2\2\u040d\u00f4\3\2\2\2\u040e")
buf.write("\u040f\7B\2\2\u040f\u00f6\3\2\2\2\u0410\u0411\7<\2\2\u0411")
buf.write("\u0412\7<\2\2\u0412\u00f8\3\2\2\2\u0413\u0414\7?\2\2\u0414")
buf.write("\u00fa\3\2\2\2\u0415\u0416\7@\2\2\u0416\u00fc\3\2\2\2")
buf.write("\u0417\u0418\7>\2\2\u0418\u00fe\3\2\2\2\u0419\u041a\7")
buf.write("#\2\2\u041a\u0100\3\2\2\2\u041b\u041c\7\u0080\2\2\u041c")
buf.write("\u0102\3\2\2\2\u041d\u041e\7A\2\2\u041e\u0104\3\2\2\2")
buf.write("\u041f\u0420\7<\2\2\u0420\u0106\3\2\2\2\u0421\u0422\7")
buf.write("/\2\2\u0422\u0423\7@\2\2\u0423\u0108\3\2\2\2\u0424\u0425")
buf.write("\7?\2\2\u0425\u0426\7?\2\2\u0426\u010a\3\2\2\2\u0427\u0428")
buf.write("\7>\2\2\u0428\u0429\7?\2\2\u0429\u010c\3\2\2\2\u042a\u042b")
buf.write("\7@\2\2\u042b\u042c\7?\2\2\u042c\u010e\3\2\2\2\u042d\u042e")
buf.write("\7#\2\2\u042e\u042f\7?\2\2\u042f\u0110\3\2\2\2\u0430\u0431")
buf.write("\7(\2\2\u0431\u0432\7(\2\2\u0432\u0112\3\2\2\2\u0433\u0434")
buf.write("\7~\2\2\u0434\u0435\7~\2\2\u0435\u0114\3\2\2\2\u0436\u0437")
buf.write("\7-\2\2\u0437\u0438\7-\2\2\u0438\u0116\3\2\2\2\u0439\u043a")
buf.write("\7/\2\2\u043a\u043b\7/\2\2\u043b\u0118\3\2\2\2\u043c\u043d")
buf.write("\7-\2\2\u043d\u011a\3\2\2\2\u043e\u043f\7/\2\2\u043f\u011c")
buf.write("\3\2\2\2\u0440\u0441\7,\2\2\u0441\u011e\3\2\2\2\u0442")
buf.write("\u0443\7\61\2\2\u0443\u0120\3\2\2\2\u0444\u0445\7(\2\2")
buf.write("\u0445\u0122\3\2\2\2\u0446\u0447\7~\2\2\u0447\u0124\3")
buf.write("\2\2\2\u0448\u0449\7`\2\2\u0449\u0126\3\2\2\2\u044a\u044b")
buf.write("\7\'\2\2\u044b\u0128\3\2\2\2\u044c\u044d\7-\2\2\u044d")
buf.write("\u044e\7?\2\2\u044e\u012a\3\2\2\2\u044f\u0450\7/\2\2\u0450")
buf.write("\u0451\7?\2\2\u0451\u012c\3\2\2\2\u0452\u0453\7,\2\2\u0453")
buf.write("\u0454\7?\2\2\u0454\u012e\3\2\2\2\u0455\u0456\7\61\2\2")
buf.write("\u0456\u0457\7?\2\2\u0457\u0130\3\2\2\2\u0458\u0459\7")
buf.write("(\2\2\u0459\u045a\7?\2\2\u045a\u0132\3\2\2\2\u045b\u045c")
buf.write("\7~\2\2\u045c\u045d\7?\2\2\u045d\u0134\3\2\2\2\u045e\u045f")
buf.write("\7`\2\2\u045f\u0460\7?\2\2\u0460\u0136\3\2\2\2\u0461\u0462")
buf.write("\7\'\2\2\u0462\u0463\7?\2\2\u0463\u0138\3\2\2\2\u0464")
buf.write("\u0465\7>\2\2\u0465\u0466\7>\2\2\u0466\u0467\7?\2\2\u0467")
buf.write("\u013a\3\2\2\2\u0468\u0469\7@\2\2\u0469\u046a\7@\2\2\u046a")
buf.write("\u046b\7?\2\2\u046b\u013c\3\2\2\2\u046c\u046d\7@\2\2\u046d")
buf.write("\u046e\7@\2\2\u046e\u046f\7@\2\2\u046f\u0470\7?\2\2\u0470")
buf.write("\u013e\3\2\2\2\u0471\u0475\5\u0141\u00a1\2\u0472\u0474")
buf.write("\5\u0143\u00a2\2\u0473\u0472\3\2\2\2\u0474\u0477\3\2\2")
buf.write("\2\u0475\u0473\3\2\2\2\u0475\u0476\3\2\2\2\u0476\u0140")
buf.write("\3\2\2\2\u0477\u0475\3\2\2\2\u0478\u047d\t\21\2\2\u0479")
buf.write("\u047d\n\22\2\2\u047a\u047b\t\23\2\2\u047b\u047d\t\24")
buf.write("\2\2\u047c\u0478\3\2\2\2\u047c\u0479\3\2\2\2\u047c\u047a")
buf.write("\3\2\2\2\u047d\u0142\3\2\2\2\u047e\u0483\t\25\2\2\u047f")
buf.write("\u0483\n\22\2\2\u0480\u0481\t\23\2\2\u0481\u0483\t\24")
buf.write("\2\2\u0482\u047e\3\2\2\2\u0482\u047f\3\2\2\2\u0482\u0480")
buf.write("\3\2\2\2\u0483\u0144\3\2\2\2\u0484\u0486\t\26\2\2\u0485")
buf.write("\u0484\3\2\2\2\u0486\u0487\3\2\2\2\u0487\u0485\3\2\2\2")
buf.write("\u0487\u0488\3\2\2\2\u0488\u0489\3\2\2\2\u0489\u048a\b")
buf.write("\u00a3\2\2\u048a\u0146\3\2\2\2\u048b\u048c\7\61\2\2\u048c")
buf.write("\u048d\7,\2\2\u048d\u0491\3\2\2\2\u048e\u0490\13\2\2\2")
buf.write("\u048f\u048e\3\2\2\2\u0490\u0493\3\2\2\2\u0491\u0492\3")
buf.write("\2\2\2\u0491\u048f\3\2\2\2\u0492\u0494\3\2\2\2\u0493\u0491")
buf.write("\3\2\2\2\u0494\u0495\7,\2\2\u0495\u0496\7\61\2\2\u0496")
buf.write("\u0497\3\2\2\2\u0497\u0498\b\u00a4\2\2\u0498\u0148\3\2")
buf.write("\2\2\u0499\u049a\7\61\2\2\u049a\u049b\7\61\2\2\u049b\u049f")
buf.write("\3\2\2\2\u049c\u049e\n\27\2\2\u049d\u049c\3\2\2\2\u049e")
buf.write("\u04a1\3\2\2\2\u049f\u049d\3\2\2\2\u049f\u04a0\3\2\2\2")
buf.write("\u04a0\u04a2\3\2\2\2\u04a1\u049f\3\2\2\2\u04a2\u04a3\b")
buf.write("\u00a5\2\2\u04a3\u014a\3\2\2\29\2\u02e8\u02ec\u02f0\u02f4")
buf.write("\u02f8\u02ff\u0304\u0306\u030a\u030d\u0311\u0318\u031c")
buf.write("\u0321\u0329\u032c\u0333\u0337\u033b\u0341\u0344\u034b")
buf.write("\u034f\u0357\u035a\u0361\u0365\u0369\u036e\u0371\u0374")
buf.write("\u0379\u037c\u0381\u0386\u038e\u0399\u039d\u03a2\u03a6")
buf.write("\u03b6\u03c0\u03c6\u03cd\u03d1\u03d7\u03e4\u03ec\u0475")
buf.write("\u047c\u0482\u0487\u0491\u049f\3\2\3\2")
return buf.getvalue()
class Java9Lexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
T__2 = 3
T__3 = 4
T__4 = 5
T__5 = 6
T__6 = 7
T__7 = 8
T__8 = 9
T__9 = 10
ABSTRACT = 11
ASSERT = 12
BOOLEAN = 13
BREAK = 14
BYTE = 15
CASE = 16
CATCH = 17
CHAR = 18
CLASS = 19
CONST = 20
CONTINUE = 21
DEFAULT = 22
DO = 23
DOUBLE = 24
ELSE = 25
ENUM = 26
EXTENDS = 27
FINAL = 28
FINALLY = 29
FLOAT = 30
FOR = 31
IF = 32
GOTO = 33
IMPLEMENTS = 34
IMPORT = 35
INSTANCEOF = 36
INT = 37
INTERFACE = 38
LONG = 39
NATIVE = 40
NEW = 41
PACKAGE = 42
PRIVATE = 43
PROTECTED = 44
PUBLIC = 45
RETURN = 46
SHORT = 47
STATIC = 48
STRICTFP = 49
SUPER = 50
SWITCH = 51
SYNCHRONIZED = 52
THIS = 53
THROW = 54
THROWS = 55
TRANSIENT = 56
TRY = 57
VOID = 58
VOLATILE = 59
WHILE = 60
UNDER_SCORE = 61
IntegerLiteral = 62
FloatingPointLiteral = 63
BooleanLiteral = 64
CharacterLiteral = 65
StringLiteral = 66
NullLiteral = 67
LPAREN = 68
RPAREN = 69
LBRACE = 70
RBRACE = 71
LBRACK = 72
RBRACK = 73
SEMI = 74
COMMA = 75
DOT = 76
ELLIPSIS = 77
AT = 78
COLONCOLON = 79
ASSIGN = 80
GT = 81
LT = 82
BANG = 83
TILDE = 84
QUESTION = 85
COLON = 86
ARROW = 87
EQUAL = 88
LE = 89
GE = 90
NOTEQUAL = 91
AND = 92
OR = 93
INC = 94
DEC = 95
ADD = 96
SUB = 97
MUL = 98
DIV = 99
BITAND = 100
BITOR = 101
CARET = 102
MOD = 103
ADD_ASSIGN = 104
SUB_ASSIGN = 105
MUL_ASSIGN = 106
DIV_ASSIGN = 107
AND_ASSIGN = 108
OR_ASSIGN = 109
XOR_ASSIGN = 110
MOD_ASSIGN = 111
LSHIFT_ASSIGN = 112
RSHIFT_ASSIGN = 113
URSHIFT_ASSIGN = 114
Identifier = 115
WS = 116
COMMENT = 117
LINE_COMMENT = 118
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'open'", "'module'", "'requires'", "'exports'", "'to'", "'opens'",
"'uses'", "'provides'", "'with'", "'transitive'", "'abstract'",
"'assert'", "'boolean'", "'break'", "'byte'", "'case'", "'catch'",
"'char'", "'class'", "'const'", "'continue'", "'default'", "'do'",
"'double'", "'else'", "'enum'", "'extends'", "'final'", "'finally'",
"'float'", "'for'", "'if'", "'goto'", "'implements'", "'import'",
"'instanceof'", "'int'", "'interface'", "'long'", "'native'",
"'new'", "'package'", "'private'", "'protected'", "'public'",
"'return'", "'short'", "'static'", "'strictfp'", "'super'",
"'switch'", "'synchronized'", "'this'", "'throw'", "'throws'",
"'transient'", "'try'", "'void'", "'volatile'", "'while'", "'_'",
"'null'", "'('", "')'", "'{'", "'}'", "'['", "']'", "';'", "','",
"'.'", "'...'", "'@'", "'::'", "'='", "'>'", "'<'", "'!'", "'~'",
"'?'", "':'", "'->'", "'=='", "'<='", "'>='", "'!='", "'&&'",
"'||'", "'++'", "'--'", "'+'", "'-'", "'*'", "'/'", "'&'", "'|'",
"'^'", "'%'", "'+='", "'-='", "'*='", "'/='", "'&='", "'|='",
"'^='", "'%='", "'<<='", "'>>='", "'>>>='" ]
symbolicNames = [ "<INVALID>",
"ABSTRACT", "ASSERT", "BOOLEAN", "BREAK", "BYTE", "CASE", "CATCH",
"CHAR", "CLASS", "CONST", "CONTINUE", "DEFAULT", "DO", "DOUBLE",
"ELSE", "ENUM", "EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR",
"IF", "GOTO", "IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT", "INTERFACE",
"LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE", "PROTECTED",
"PUBLIC", "RETURN", "SHORT", "STATIC", "STRICTFP", "SUPER",
"SWITCH", "SYNCHRONIZED", "THIS", "THROW", "THROWS", "TRANSIENT",
"TRY", "VOID", "VOLATILE", "WHILE", "UNDER_SCORE", "IntegerLiteral",
"FloatingPointLiteral", "BooleanLiteral", "CharacterLiteral",
"StringLiteral", "NullLiteral", "LPAREN", "RPAREN", "LBRACE",
"RBRACE", "LBRACK", "RBRACK", "SEMI", "COMMA", "DOT", "ELLIPSIS",
"AT", "COLONCOLON", "ASSIGN", "GT", "LT", "BANG", "TILDE", "QUESTION",
"COLON", "ARROW", "EQUAL", "LE", "GE", "NOTEQUAL", "AND", "OR",
"INC", "DEC", "ADD", "SUB", "MUL", "DIV", "BITAND", "BITOR",
"CARET", "MOD", "ADD_ASSIGN", "SUB_ASSIGN", "MUL_ASSIGN", "DIV_ASSIGN",
"AND_ASSIGN", "OR_ASSIGN", "XOR_ASSIGN", "MOD_ASSIGN", "LSHIFT_ASSIGN",
"RSHIFT_ASSIGN", "URSHIFT_ASSIGN", "Identifier", "WS", "COMMENT",
"LINE_COMMENT" ]
ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
"T__7", "T__8", "T__9", "ABSTRACT", "ASSERT", "BOOLEAN",
"BREAK", "BYTE", "CASE", "CATCH", "CHAR", "CLASS", "CONST",
"CONTINUE", "DEFAULT", "DO", "DOUBLE", "ELSE", "ENUM",
"EXTENDS", "FINAL", "FINALLY", "FLOAT", "FOR", "IF", "GOTO",
"IMPLEMENTS", "IMPORT", "INSTANCEOF", "INT", "INTERFACE",
"LONG", "NATIVE", "NEW", "PACKAGE", "PRIVATE", "PROTECTED",
"PUBLIC", "RETURN", "SHORT", "STATIC", "STRICTFP", "SUPER",
"SWITCH", "SYNCHRONIZED",
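# A minimal usage sketch for the generated Java9Lexer above, using only the
# standard antlr4 Python runtime API (the Java source string is illustrative):
#
#   from antlr4 import InputStream, CommonTokenStream
#
#   lexer = Java9Lexer(InputStream("class Foo { int x = 42; }"))
#   stream = CommonTokenStream(lexer)
#   stream.fill()  # drive the lexer over the whole input
#   for tok in stream.tokens:
#       print(tok.type, repr(tok.text))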
#------------------------------------------------------------------------------
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# Version 2, December 2004
#
# Copyright (C) 2013 <NAME> <<EMAIL>>
#
# Everyone is permitted to copy and distribute verbatim or modified
# copies of this license document, and changing it is allowed as long
# as the name is changed.
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
#
# 0. You just DO WHAT THE FUCK YOU WANT TO.
#
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# mas_blocks.py - Blocks found in Massive scene files. (.mas)
#------------------------------------------------------------------------------
import itertools
import os
import re
from scanf import sscanf
from scanf import IncompleteCaptureError
from block import Block
from common import Variable
#------------------------------------------------------------------------------
# class DisplayOptionsBlock
#------------------------------------------------------------------------------
class DisplayOptionsBlock(Block):
"""User interface display options.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
_sBlockFormatting = [
"shade %d",
"shadows %d",
"shadow_bias %g",
"shadow_colour %g %g %g",
"shadow_map_resolution %d",
"backfaces %d",
"blocky %d", # never parsed?
"cameras %d",
"lanes %d",
"lights %d",
"filmback %d",
"crop_filmback %d",
"masks %d",
"heads_up %d",
"grid %d %f %f",
"motion_blur %d %d %f",
"antialias %d",
"standins %d",
"sprites %d",
"statistics %d",
"time %d",
"time_in_frames %d",
"vision %d %d",
"sound_emission %d",
"sound_reception %d",
"sound_rules %d",
"links %d",
"connected %d",
"material_links %d",
"cloth_active %d",
"hair_active %d",
"terrain %d",
"shade_terrain %d",
"terrain_lines %d",
"terrain_map %d",
"terrain_alpha %d",
"terrain_toggle %s",
"subdiv_quality %d %f",
"subdivs %d",
"agent_fields %d",
"playbacks %d", # always written out as 1?
"triggers %d", # always written out as 1?
"wind_display_scale %g",
"handle_scale %g",
"render_pass %s",
]
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with scene data.
"""
super(DisplayOptionsBlock, self).__init__()
special_list = { "terrain_toggle" : self._parseAttributeString }
self.parseAttributes(
block, DisplayOptionsBlock._sBlockFormatting, special_list)
#--------------------------------------------------------------------------
def __str__(self):
block = self.printAttributes(DisplayOptionsBlock._sBlockFormatting)
return "Display options\n%sEnd display options" % self._addIndent(block)
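#------------------------------------------------------------------------------
# Example: each _sBlockFormatting entry is a plain scanf pattern, so a single
# attribute line round-trips through sscanf. A minimal sketch, mirroring this
# module's own sscanf(string, format) call order (the sample line is made up):
#
#   >>> from scanf import sscanf
#   >>> sscanf("shadow_colour 0.25 0.25 0.25", "shadow_colour %g %g %g")
#   (0.25, 0.25, 0.25)
#------------------------------------------------------------------------------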
#------------------------------------------------------------------------------
# class TerrainsBlock
#------------------------------------------------------------------------------
class TerrainsBlock(Block):
"""Scene terrains.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
_sBlockFormatting = [
"texture_process %d",
"texture_process_width %d",
"texture_process_multiply %g",
"texture_process_skip %d",
"texture_process_paint_planes %d",
]
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with raw block scene data.
"""
super(TerrainsBlock, self).__init__()
self.terrains = []
special_list = { "terrain" : self._parseTerrain }
self.parseAttributes(block, TerrainsBlock._sBlockFormatting, special_list)
#--------------------------------------------------------------------------
def __str__(self):
terrains = "".join(map(str, self.terrains))
attributes = self.printAttributes(TerrainsBlock._sBlockFormatting)
block = "%s%s" % (terrains, attributes)
return "Terrains\n%sEnd terrains" % self._addIndent(block)
#--------------------------------------------------------------------------
# helper methods
#--------------------------------------------------------------------------
def _parseTerrain(self, block):
"""Collate all of the terrains in the scene.
"""
self.terrains.append(TerrainNode(block))
#------------------------------------------------------------------------------
# class TerrainNode
#------------------------------------------------------------------------------
class TerrainNode(Block):
"""Terrain in the scene.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
_sBlockFormatting = [
"translate %d %d",
"model %s",
"texmap %s",
"display",
"active",
"dynamic",
"flip_normals",
"visible_to_agents",
"render_pass %s",
"ambient %g %g %g %s",
"diffuse %g %g %g %s",
"specular %g %g %g %s",
"roughness %g",
"shader %s",
"displacement %s",
"displ_bounds %s",
]
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with scene data.
"""
super(TerrainNode, self).__init__()
# remove the block header
header, block = block.partition('\n')[::2]
self._parseHeader(header)
# remove indent from block
block = self._removeIndent(block)
# setup special parse list
special_list = {
"render_pass" : self._parseAttributeString,
"shader" : self._parseAttributeString,
"displacement" : self._parseAttributeString,
"displ_bounds" : self._parseAttributeString
}
# parse the attributes
self.parseAttributes(block, TerrainNode._sBlockFormatting, special_list)
#--------------------------------------------------------------------------
def __str__(self):
header = "terrain %s" % self.name
block = self.printAttributes(TerrainNode._sBlockFormatting)
return "%s\n%s" % (header, self._addIndent(block))
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def _parseHeader(self, header):
"""Terrain block header contains terrain name.
"""
formatting = "terrain %s"
(self.name,) = sscanf(header, formatting)
#------------------------------------------------------------------------------
# class CamerasBlock
#------------------------------------------------------------------------------
class CamerasBlock(Block):
"""Scene cameras and camera clipping planes.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
_sBlockFormatting = [
"clipping_planes %d",
"clipping_plane %d [%g %g %g %g]",
]
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with raw block scene data.
"""
super(CamerasBlock, self).__init__()
self.cameras = []
special_list = { "camera" : self._parseCamera }
self.parseAttributes(block, CamerasBlock._sBlockFormatting, special_list)
#--------------------------------------------------------------------------
def __str__(self):
cameras = "".join(map(str, self.cameras))
attributes = self.printAttributes(CamerasBlock._sBlockFormatting)
block = "%s%s" % (cameras, attributes)
return "Cameras\n%sEnd cameras" % self._addIndent(block)
#--------------------------------------------------------------------------
# helper methods
#--------------------------------------------------------------------------
def _parseCamera(self, block):
"""Collate all of the cameras in the scene.
"""
self.cameras.append(CameraNode(block))
#------------------------------------------------------------------------------
# class CameraNode
#------------------------------------------------------------------------------
class CameraNode(Block):
"""Camera in the scene.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
_sBlockFormatting = [
"translate %d %d", # parse manually
"fov %g",
"filmback %g %g",
"zrange %g %g",
"pixel_aspect %g",
"bgpic %s %d",
"order %s", # not sure what this is
"keyable %s", # cx, cy, cz, tx, ty, tz, rx, ry, rz
"file %s",
"frame_offset %d",
"translate %g %g %g",
"rotate %g %g %g",
"pivot %g %g %g",
"display_fustrum",
"constrain %s", # follow_xz | follow_3d | lookat | pov | agent
"constrain %s %s",
"filter %d %f",
"volume \"%s\" %s %s",
"lens \"%s\" %s %s",
]
#--------------------------------------------------------------------------
# enums
#--------------------------------------------------------------------------
class kConstrain:
LookAt = 'lookat'
Agent = 'agent'
Pov = 'pov'
FollowXZ = 'follow_xz'
Follow3D = 'follow_3d'
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with scene data.
"""
super(CameraNode, self).__init__()
# remove the block header
header, block = block.partition('\n')[::2]
self._parseHeader(header)
# remove indent from block
block = self._removeIndent(block)
# first line contains node position for the ui, needs to be parsed
# separately since the 'translate' tag is used twice
line, block = block.partition('\n')[::2]
self.position = sscanf(line, CameraNode._sBlockFormatting[0])
# setup special parse list
special_list = {
"keyable" : self._parseAttributeString
}
# parse the rest of the attributes
self.parseAttributes(
block, CameraNode._sBlockFormatting[1:], special_list)
#--------------------------------------------------------------------------
def __str__(self):
header = ("camera %s *" if self.selected else "camera %s") % self.name
position = CameraNode._sBlockFormatting[0] % self.position
attributes = self.printAttributes(CameraNode._sBlockFormatting[1:])
block = "%s\n%s" % (position, attributes)
return "%s\n%s" % (header, self._addIndent(block))
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def _parseHeader(self, header):
"""Camera block header contains camera name and selection status.
"""
normal = "camera %s"
selected = "camera %s *"
# check if camera selected
try:
(self.name,) = sscanf(header, selected)
self.selected = True
except IncompleteCaptureError:
(self.name,) = sscanf(header, normal)
self.selected = False
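#--------------------------------------------------------------------------
# Illustrative headers for the two formats above (camera names made up):
#
#   "camera cam01"    -> name='cam01', selected=False
#   "camera cam01 *"  -> name='cam01', selected=True
#--------------------------------------------------------------------------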
#------------------------------------------------------------------------------
# class LightingBlock
#------------------------------------------------------------------------------
class LightingBlock(Block):
"""Scene lights.
"""
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with raw block scene data.
"""
super(LightingBlock, self).__init__()
self.lights = []
special_list = { "light" : self._parseLight }
self.parseAttributes(block, [], special_list)
#--------------------------------------------------------------------------
def __str__(self):
block = "".join(map(str, self.lights))
return "Lighting\n%sEnd lighting" % self._addIndent(block)
#--------------------------------------------------------------------------
# helper methods
#--------------------------------------------------------------------------
def _parseLight(self, block):
"""Collate all of the lights in the scene.
"""
self.lights.append(LightNode(block))
#------------------------------------------------------------------------------
# class LightNode
#------------------------------------------------------------------------------
class LightNode(Block):
"""Light in the scene.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
# [moiz] skipping most of the attributes here; mostly useless, and high
# complexity with very minimal reward
_sBlockFormatting = [
"translate %d %d",
"colour %g %g %g",
"intensity %g",
"type %s" # ambient, directional, point, spot
]
#--------------------------------------------------------------------------
# enums
#--------------------------------------------------------------------------
class kType:
Ambient = 'ambient'
Directional = 'directional'
Point = 'point'
Spot = 'spot'
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with scene data.
"""
super(LightNode, self).__init__()
# remove the block header
header, block = block.partition('\n')[::2]
self._parseHeader(header)
# remove indent from block
block = self._removeIndent(block)
# first line contains node position for the ui, needs to be parsed
# separately since the 'translate' tag is used twice
line, block = block.partition('\n')[::2]
self.position = sscanf(line, LightNode._sBlockFormatting[0])
# parse the rest of the attributes
rest = self.parseAttributes(block, LightNode._sBlockFormatting[1:])
# save remaining attributes
self._raw = rest
#--------------------------------------------------------------------------
def __str__(self):
header = "light %s" % self.name
position = LightNode._sBlockFormatting[0] % self.position
attributes = self.printAttributes(LightNode._sBlockFormatting[1:])
block = "%s\n%s%s" % (position, attributes, self._raw)
return "%s\n%s" % (header, self._addIndent(block))
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def _parseHeader(self, header):
"""Light block header contains light name.
"""
formatting = "light %s"
(self.name,) = sscanf(header, formatting)
#------------------------------------------------------------------------------
# class RendersBlock
#------------------------------------------------------------------------------
class RendersBlock(Block):
"""Render options.
"""
#--------------------------------------------------------------------------
# methods
#--------------------------------------------------------------------------
def __init__(self, block):
"""Initialize self with raw block scene data.
"""
super(RendersBlock, self).__init__()
self.renders = []
special_list = { "render" : self._parseRender }
self.parseAttributes(block, [], special_list)
#--------------------------------------------------------------------------
def __str__(self):
block = "".join(map(str, self.renders))
return "Renders\n\n%sEnd renders" % self._addIndent(block)
#--------------------------------------------------------------------------
# helper methods
#--------------------------------------------------------------------------
def _parseRender(self, block):
"""Collate all of the render entries for the scene.
"""
self.renders.append(RenderOption(block))
#------------------------------------------------------------------------------
# class RenderOption
#------------------------------------------------------------------------------
class RenderOption(Block):
"""Render option settings.
"""
#--------------------------------------------------------------------------
# statics
#--------------------------------------------------------------------------
# [moiz] skipping most of the attributes here; mostly useless, and high
# complexity with very minimal reward
_sBlockFormatting = [
"images %s",
"render_files %s",
"render_pass_name %s",
"camera %s",
"renderer %i", # prman=1, air=2, 3delight=3, velocity=4, mentalray=5, vary=6
"resolution_option %i",
"resolution %i
# -*- coding: utf-8 -*-
from datetime import datetime
from .colors import Colors
from typing import List, Tuple, TextIO, Optional, Callable, Any
def _get_datetime() -> str:
return datetime.now().strftime('%d-%b-%Y %I:%M:%S%p')
def _print_with_color(args: Tuple[Any, ...], color_fn: Callable[[str], str],
add_datetime: bool, prefixes: Tuple[Optional[str], ...],
sep: str, end: str, closed_ok: bool, file: Optional[TextIO],
flush: bool) -> None:
_args: List[str] = [str(arg) for arg in args]
for prefix in reversed(prefixes):
if prefix is None:
continue
# Add a space to the end of the prefix if it doesn't already have one
_args[0] = f'{prefix}{_args[0]}' if prefix.endswith(' ') else f'{prefix} {_args[0]}'
if add_datetime:
_args[0] = f'{_get_datetime()} {_args[0]}'
_args = [color_fn(arg) for arg in _args]
try:
print(*_args, sep=color_fn(sep), end=color_fn(end), file=file, flush=flush)
except ValueError:
if closed_ok:
pass
else:
raise
def black(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in black.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.black, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def red(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in red.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.red, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def green(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in green.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.green, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def yellow(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in yellow.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.yellow, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def blue(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in blue.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.blue, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def magenta(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in magenta.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.magenta, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def cyan(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in cyan.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
suppressed. Default `False`.
file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
"""
_print_with_color(args, Colors.cyan, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
def white(*args: Any, add_datetime: bool = False, prefix: Optional[str] = None,
sep: str = ' ', end: str = '\n', closed_ok: bool = False,
file: Optional[TextIO] = None, flush: bool = False) -> None:
"""
Prints values in white.
Args:
add_datetime (bool, optional): Whether or not a datetime timestamp should be printed. Default `False`.
prefix (any, optional): A string interpolatable value that should be prepended to the print. Default `None`.
sep (str, optional): String inserted between values, default is a space. Default `' '`.
end (str, optional): String appended after the last value, default is a newline. Default `\n`.
closed_ok (bool, optional): Whether or not the ValueError raised by a closed stdout should be
                      suppressed. Default `False`.
        file: A file-like object (stream, optional): Defaults to the current sys.stdout. Default `None`.
        flush (bool, optional): Whether to forcibly flush the stream. Default `False`.
    """
    _print_with_color(args, Colors.white, add_datetime, (prefix,), sep, end, closed_ok, file, flush)
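# Usage sketch (hypothetical, not part of the library itself): every function
# above is a drop-in replacement for print() with a color applied, e.g.:
#
#   red('Something went wrong', prefix='[ERROR]', add_datetime=True)
#   green('All checks passed', prefix='[OK]')
#   # closed_ok=True suppresses the ValueError raised when stdout is already
#   # closed, which is useful in atexit handlers:
#   cyan('Shutting down', closed_ok=True)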
import re
import pandas as pd
import numpy as np
import time
import constants
import pickle
# Cleansing
def cleanse(text):
"""
Clean up the text a little by removing punctuation, extra spaces, new lines, etc.
    This should be run after split_to_sentences() / tokenize_by_sentence(), because it removes the
    punctuation that those functions need in order to split the text into sentences.
:param text:
:return:
"""
text = text.lower()
text = text.replace("'", '') # Remove apostrophes
    # Replace punctuation with a space so that <word comma word> is not accidentally
    # collapsed into one word; the extra spaces are removed a couple of lines below.
text = re.sub('[^\w\s]', ' ', text)
text = re.sub('\\n', ' ', text)
text = re.sub(' +', ' ', text)
text = text.strip() # Small thing but remove a trailing space.
return text
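# A quick illustration of cleanse() (behavior follows the substitutions above):
#   cleanse("Don't worry -- it's fine!\n")  ->  'dont worry its fine'
# Apostrophes are removed outright, all other punctuation becomes a space, and
# runs of whitespace collapse to a single space.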
# Tokenization
def tokenize_string(sentence):
"""
cleanse the string and tokenize to individual words
:param sentence: a string of text (in the context of this application, most likely an individual sentence)
:return: a list of strings
"""
sentence = cleanse(sentence)
return sentence.split(' ')
# TODO: Convert contractions to the "uncontracted" two words. Ex "you'll" -> "you are".
# Would need some list of common contractions. Of course, this is language dependent.
def split_to_sentences(text):
"""
Gets a bunch of text and returns the sentences as a list. It attempts to split the text up into its component
sentences, using punctuation that typically ends a sentence (see constants.PUNCTUATION_REGEX, which at the moment is
'[.?!]'). Text that does not behave this way, for example when each line is intended to be independent,
will likely give an unexpected result.
:param text: regular text; for example the contents of a file of text
:return: a list, where each element in the list is a sentence
"""
# TODO: A way to handle text that is broken up by lines (for example, poetry); maybe allow the call to specify
# the regex.
p = re.compile(constants.PUNCTUATION_REGEX)
sentences = p.split(text)
for i in range(len(sentences)):
sentences[i] = sentences[i].strip()
if sentences[-1] == '':
sentences = sentences[:-1]
return sentences
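# Example (assuming constants.PUNCTUATION_REGEX is '[.?!]' as noted above):
#   split_to_sentences("One sentence. Two sentences! Three?")
#   ->  ['One sentence', 'Two sentences', 'Three']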
def tokenize_by_sentence(text):
"""
Tokenize the text, but group words in sentences together. The input, and constraints on tokenization,
are the same as for split_to_sentences().
:param text: regular text; for example the contents of a file of text
:return: A list of lists. Each list inside the overall list is the words in a given sentence.
"""
sentences = split_to_sentences(text)
result = []
for sentence in sentences:
current_result = tokenize_string(sentence)
if current_result is not None and current_result != ['']:
result.append(current_result)
return result
# Statistics
def find_word_stats(text):
"""
Get statistics on word frequencies. This tells you the word and how many times it occurred in the text. There
are also columns for the fraction of total words that it represents, cumulative count and cumulative ratio of all
words, "cumulative" being if you count that word and all other words above it.
:param text:
:return: a DataFrame, sorted by most common words first
"""
tokens = tokenize_string(text)
tokens_pd = pd.Series(tokens)
token_hist = tokens_pd.value_counts()
    stats = pd.DataFrame({constants.COUNT_COLUMN_NAME: token_hist})
    stats[constants.FRACTION_COLUMN_NAME] = stats[constants.COUNT_COLUMN_NAME] / len(tokens)
stats[constants.CUM_SUM_COLUMN_NAME] = stats[constants.COUNT_COLUMN_NAME].cumsum()
stats[constants.CUM_FRAC_COLUMN_NAME] = stats[constants.CUM_SUM_COLUMN_NAME] / len(tokens)
return stats
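# Illustration (column names actually come from constants; illustrative labels here):
#   find_word_stats("the cat and the hat") gives, sorted by count,
#        count  fraction  cum_sum  cum_frac
#   the      2       0.4        2       0.4
#   cat      1       0.2        3       0.6
#   ...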
def find_sentence_lengths_hist(list_of_sentences):
"""
Find a histogram of all sentences lengths. Could be useful in looking at the writing style of an author,
for example.
:param list_of_sentences:
:return: a pandas Series
"""
lengths = []
for i in range(len(list_of_sentences)):
lengths.append(len(list_of_sentences[i]))
hist = pd.Series(lengths).value_counts().sort_index()
return hist
# n grams
def find_n_grams_1_d_list(tokens: list, n: int):
    """
    Find all n grams in a flat (one dimensional) list of tokens.
    :param tokens: a list of strings, typically the words of a single sentence
    :param n: the "gram length"; must be positive
    :return: a list of lists, each inner list being one n gram
    """
    ngrams = []
    if n <= 0:
        return ngrams
    for i in range(len(tokens) - n + 1):
        ngrams.append(tokens[i:(i + n)])
    return ngrams
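# Example:
#   find_n_grams_1_d_list(['to', 'be', 'or', 'not'], 2)
#   ->  [['to', 'be'], ['be', 'or'], ['or', 'not']]
# If n exceeds the number of tokens (or n <= 0) the result is [].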
def find_n_grams_list_of_lists(list_of_sentences: list, n: int):
"""
Make ngrams from the input. The input is structured so that each sentence is separate, and ngrams do not cross
sentences. For example, in the text "One sentence. Two sentences.", 2 grams would be "one sentence" and "two
sentences", but not "sentence two".
:param list_of_sentences: Must be a list where each item is itself a list. For example, the following text
"One sentence. Two sentences. To be or not to be. Whatever. The problem is that I don't even know " \
"what a sentence is." would need to be converted to the following format to be used in this function:
[['one', 'sentence'], ['two', 'sentences'], ['to', 'be', 'or', 'not', 'to', 'be'],
['whatever'], ['the', 'problem', 'is', 'that', 'i', 'dont', 'even', 'know', 'what', 'a', 'sentence', 'is']
:param n: the "gram length"; for example 2 => bi grams ("to be", "be or", "or not", etc.)
3 => tri grams ("to be or", "be or not" etc.)
:return: a "flat" (one dimensional) list with all the n grams
"""
ngrams = []
if list_of_sentences is None:
return ngrams
for item in list_of_sentences:
current_ngrams = find_n_grams_1_d_list(item, n)
ngrams.extend(current_ngrams)
return ngrams
def find_n_grams_from_text(text, n):
lists_of_words = tokenize_by_sentence(text)
ngrams = find_n_grams_list_of_lists(lists_of_words, n)
return ngrams
def convert_n_grams_to_hist_df(ngrams_list):
"""
Take the raw list of n grams and convert to a DataFrame that maps the n gram to the count
:param ngrams_list: a list of all ngrams
:return: a DataFrame
"""
# Use Series.value_counts() to get the counts.
n_grams_series = pd.Series(ngrams_list)
# However, using value_counts() on a list is extremely slow, so convert it to a string.
# example ['in', 'the', 'fridge'] -> "in the fridge"
# I know, maybe find_n_grams_from_text should return the results as a string so we aren't making it a list and
# then converting back. TODO: Determine at some point if we should generate n grams as a string not a list.
n_grams_series = n_grams_series.apply(constants.N_GRAM_SEPARATOR.join)
n_grams_hist = n_grams_series.value_counts()
grams_hist_df = pd.DataFrame({constants.GRAM_COLUMN_NAME: n_grams_hist.index, constants.COUNT_COLUMN_NAME: n_grams_hist.values})
return grams_hist_df
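# Example (separator taken from constants.N_GRAM_SEPARATOR, assumed ' ' here):
#   convert_n_grams_to_hist_df([['to', 'be'], ['to', 'be'], ['or', 'not']])
#   ->  a DataFrame with rows ('to be', 2) and ('or not', 1)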
def process_one_file(file_name):
start = time.time()
print(start)
with open(file_name, 'r', encoding='UTF-8') as f:
file_text = f.read()
# word_stats_df = find_word_stats(file_text)
sentences = tokenize_by_sentence(file_text)
# sentence_lengths = find_sentence_lengths_hist(sentences)
two_grams = find_n_grams_list_of_lists(sentences, 2)
three_grams = find_n_grams_list_of_lists(sentences, 3)
four_grams = find_n_grams_list_of_lists(sentences, 4)
five_grams = find_n_grams_list_of_lists(sentences, 5)
six_grams = find_n_grams_list_of_lists(sentences, 6)
print('we have the n grams now', time.time())
two_grams_hist_df = convert_n_grams_to_hist_df(two_grams)
print('we have the two grams hist now', time.time())
three_grams_hist_df = convert_n_grams_to_hist_df(three_grams)
print('we have the three grams hist now', time.time())
four_grams_hist_df = convert_n_grams_to_hist_df(four_grams)
five_grams_hist_df = convert_n_grams_to_hist_df(five_grams)
six_grams_hist_df = convert_n_grams_to_hist_df(six_grams)
end = time.time()
print("processing completed in", (end - start), "seconds")
two_grams_hist_df.to_csv(file_name.split("/")[-1] + "_2_grams.csv", index=False)
three_grams_hist_df.to_csv(file_name.split("/")[-1] + "_3_grams.csv", index=False)
four_grams_hist_df.to_csv(file_name.split("/")[-1] + "_4_grams.csv", index=False)
five_grams_hist_df.to_csv(file_name.split("/")[-1] + "_5_grams.csv", index=False)
six_grams_hist_df.to_csv(file_name.split("/")[-1] + "_6_grams.csv", index=False)
print("files saved")
def split_prefix(text):
"""
get all the words in the sentence except the last
:param text:
:return:
"""
tokens = text.split(" ")
return " ".join(tokens[:(len(tokens) - 1)])
def create_prefix_map(ngrams_hist):
"""
Create a Dict object that maps the prefix to all indices in the input histogram where that prefix occurs
:param ngrams_hist: a DataFrame
:return: a dictionary
"""
if constants.SOURCE not in ngrams_hist.columns:
# ngrams_hist['prefix'] = ngrams_hist.gram.apply(split_prefix)
ngrams_hist[constants.SOURCE] = ngrams_hist.gram.apply(split_prefix)
if constants.TARGET not in ngrams_hist.columns:
ngrams_hist[constants.TARGET] = ngrams_hist.gram.apply(lambda text: text.split(" ")[-1])
prefix_map = {}
for (index, gram, count, prefix, target) in ngrams_hist.itertuples():
# print(gram, count, prefix, target)
if prefix not in prefix_map:
prefix_map[prefix] = [index]
else:
prefix_map[prefix].append(index)
return prefix_map
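# Example: for a 2-gram histogram containing 'to be', 'to go' and 'be or',
# create_prefix_map() returns {'to': [i0, i1], 'be': [i2]}, where the values are
# the DataFrame indices of the rows sharing that prefix. A next-word predictor
# can then fetch all candidate rows for a prefix and rank them by count.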
# Creating training data
def get_random_sentences(sentences_as_list, how_many, min_num_words=None):
"""
Get random sentences from a list of sentences. Technically, all this function does, at least right now,
is one line, return np.random.choice(sentences_as_list, how_many, replace=False). But that could change.
:param sentences_as_list:
    :param how_many:
    :param min_num_words: the minimum number of words a sentence must contain to be eligible
    :return:
"""
if how_many > len(sentences_as_list):
how_many = len(sentences_as_list)
# TODO: The line below returns an np array. Should it return a regular list?
# return np.random.choice(sentences_as_list, how_many, replace=False)
# To test for minimum number of words, let's shuffle the indices first, then, if necessary, test each sentence at
    # the random index.
# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import collections
import math
from typing import Optional, Sequence, Tuple, Union
from ..core._imperative_rt.core2 import apply, dtype_promotion
from ..core._trace_option import use_symbolic_shape
from ..core.ops import builtin
from ..core.ops.special import Const
from ..core.tensor import amp
from ..core.tensor.utils import _normalize_axis, cast_tensors, setscalar
from ..tensor import Tensor
from .debug_param import get_execution_strategy
from .elemwise import clip
from .tensor import broadcast_to, concat, expand_dims, squeeze
__all__ = [
"argmax",
"argmin",
"argsort",
"dot",
"isinf",
"isnan",
"matinv",
"matmul",
"max",
"mean",
"min",
"norm",
"normalize",
"prod",
"sign",
"sort",
"std",
"sum",
"svd",
"topk",
"var",
]
def isnan(inp: Tensor) -> Tensor:
r"""
Returns a new tensor representing if each element is ``NaN`` or not.
:param inp: input tensor.
:return: result tensor.
Examples:
.. testcode::
from megengine import tensor
import megengine.functional as F
x = tensor([1, float("nan"), 0])
print(F.isnan(x).numpy())
Outputs:
.. testoutput::
[False True False]
"""
return inp != inp
def isinf(inp: Tensor) -> Tensor:
r"""
Returns a new tensor representing if each element is ``Inf`` or not.
:param inp: input tensor.
:return: result tensor.
Examples:
.. testcode::
from megengine import tensor
import megengine.functional as F
x = tensor([1, float("inf"), 0])
print(F.isinf(x).numpy())
Outputs:
.. testoutput::
[False True False]
"""
return abs(inp).astype("float32") == float("inf")
def sign(inp: Tensor):
r"""
Returns a new tensor representing the sign of each element in input tensor.
    :param inp: input tensor.
:return: the sign of input tensor.
Examples:
.. testcode::
from megengine import tensor
import megengine.functional as F
x = tensor([1, -1, 0])
print(F.sign(x).numpy())
Outputs:
.. testoutput::
[ 1 -1 0]
"""
return (inp > 0).astype(inp.dtype) - (inp < 0).astype(inp.dtype)
def sum(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
r"""
Returns the sum of input tensor along given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced.
Default: None
:param keepdims: whether the output tensor has axis retained or not.
Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2, 3))
out = F.sum(x)
print(out.numpy())
Outputs:
.. testoutput::
21
"""
return inp.sum(axis=axis, keepdims=keepdims)
def prod(
inp: Tensor, axis: Optional[Union[int, Sequence[int]]] = None, keepdims=False
) -> Tensor:
r"""
Returns the product of input tensor along given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2, 3))
out = F.prod(x)
print(out.numpy())
Outputs:
.. testoutput::
720
"""
return inp.prod(axis=axis, keepdims=keepdims)
def mean(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
"""
Returns the mean value of input tensor along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2, 3))
out = F.mean(x)
print(out.numpy())
Outputs:
.. testoutput::
3.5
"""
return inp.mean(axis=axis, keepdims=keepdims)
def var(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
"""
Returns the variance value of input tensor along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
data = tensor(np.arange(1, 7, dtype=np.float32).reshape(2, 3))
out = F.var(data)
print(out.numpy().round(decimals=4))
Outputs:
.. testoutput::
2.9167
"""
if axis is None:
m = mean(inp, axis=axis, keepdims=False)
else:
m = mean(inp, axis=axis, keepdims=True)
v = inp - m
return mean(v ** 2, axis=axis, keepdims=keepdims)
def std(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
"""
Returns the standard deviation of input tensor along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
data = tensor(np.arange(1, 7, dtype=np.float32).reshape(2, 3))
out = F.std(data, axis=1)
print(out.numpy().round(decimals=4))
Outputs:
.. testoutput::
[0.8165 0.8165]
"""
return var(inp, axis=axis, keepdims=keepdims) ** 0.5
def min(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
r"""
Returns the min value of input tensor along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2,3))
out = F.min(x)
print(out.numpy())
Outputs:
.. testoutput::
1
"""
return inp.min(axis=axis, keepdims=keepdims)
def max(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
r"""
Returns the max value of the input tensor along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2,3))
out = F.max(x)
print(out.numpy())
Outputs:
.. testoutput::
6
"""
return inp.max(axis=axis, keepdims=keepdims)
def norm(
inp: Tensor, ord: float = None, axis: int = None, keepdims=False,
):
"""
Calculates ``p``-norm of input tensor along
given axis.
:param inp: input tensor.
    :param ord: order of the norm applied to inp. Default: 2
:param axis: dimension to reduce. If None, input must be a vector. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(-3, 3, dtype=np.float32))
out = F.norm(x)
print(out.numpy().round(decimals=4))
Outputs:
.. testoutput::
4.3589
"""
if axis is None:
if inp.ndim != 1:
raise TypeError("axis is required unless input is a vector")
if ord is None:
ord = 2
if ord == 0:
return sum(inp != 0, axis=axis, keepdims=keepdims)
if ord == math.inf:
return max(abs(inp))
if ord == -math.inf:
return min(abs(inp))
return sum(abs(inp) ** ord, axis=axis, keepdims=keepdims) ** (1.0 / ord)
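# Worked values for the docstring's x = [-3, -2, -1, 0, 1, 2]:
#   ord=2 (default): sqrt(9 + 4 + 1 + 0 + 1 + 4) = sqrt(19) ~= 4.3589
#   ord=1:           3 + 2 + 1 + 0 + 1 + 2 = 9.0
#   ord=0:           number of nonzero elements = 5
#   ord=inf:         max(|x|) = 3.0;  ord=-inf: min(|x|) = 0.0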
def argmin(
inp: Tensor,
axis: Optional[Union[int, Sequence[int]]] = None,
keepdims: bool = False,
) -> Tensor:
r"""
Returns the indices of the minimum values along
given axis. If axis is a list of dimensions,
reduce over all of them.
:param inp: input tensor.
:param axis: dimension to reduce. If None, all dimensions will be reduced. Default: None
:param keepdims: whether the output tensor has axis retained or not. Default: False
:return: output tensor.
Examples:
.. testcode::
import numpy as np
from megengine import tensor
import megengine.functional as F
x = tensor(np.arange(1, 7, dtype=np.int32).reshape(2,3))
out = F.argmin(x)
<filename>examples/models/image_classification/PyPandaVgg_SelectiveNet.py
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import division
from __future__ import print_function
import os
import sys
import argparse
from typing import Union, Dict, Optional, Any, List
# singa_auto Dependency
from singa_auto.model import CategoricalKnob, FixedKnob, utils
from singa_auto.model.knob import BaseKnob
from singa_auto.constants import ModelDependency
from singa_auto.model.dev import test_model_class
from singa_easy.modules.mod_spl.spl import SPL
from singa_easy.modules.mod_driftadapt import LabelDriftAdapter
from singa_easy.datasets.TorchImageDataset import TorchImageDataset
# PyTorch Dependency
import torch
import torch.nn as nn
### to localize vgg
# from torchvision.models.vgg import vgg11_bn
from torch.utils.data import DataLoader
import torch.utils.model_zoo as model_zoo
import torch.optim as optim
from torch.optim import lr_scheduler
import math
# Misc Third-party Machine-Learning Dependency
import numpy as np
# singa-easy Modules Dependency
from singa_easy.models.TorchModel import TorchModel
from singa_easy.modules.mod_gmreg.gm_prior_optimizer_pytorch import GMOptimizer
from singa_easy.modules.mod_modelslicing.models import upgrade_dynamic_layers, create_sr_scheduler
KnobConfig = Dict[str, BaseKnob]
Knobs = Dict[str, Any]
Params = Dict[str, Union[str, int, float, np.ndarray]]
### add vgg with selective net
__all__ = [
'VGG',
'vgg11_bn',
]
model_urls = {
    # torchvision's vgg11_bn checkpoint; vgg11_bn() below looks up this key
    'vgg11_bn': 'https://download.pytorch.org/models/vgg11_bn-6002323d.pth',
}
class VGG(nn.Module):
def __init__(self, features, num_classes=1000, init_weights=True):
super(VGG, self).__init__()
self.features = features
self.classifier = nn.Sequential(
nn.Linear(
8192, 4096
), # 512 * 7 * 7, 4096 (original) size to be decided, 8192, 4096 for cifar10
nn.ReLU(True),
nn.Dropout(),
nn.Linear(4096, 4096),
nn.ReLU(True),
nn.Dropout(),
nn.Linear(4096, num_classes),
)
if init_weights:
self._initialize_weights()
# Add selection head with respect to the SelectiveNet code
self.selectionhead = nn.Sequential(
nn.Linear(8192, 4096),
nn.ReLU(False),
nn.BatchNorm1d(4096),
nn.Linear(4096, 1),
nn.Sigmoid(),
)
# one output neuron with sigmoid
def forward(self, x):
x = self.features(x)
# 4D to 2D, [BatchSize, 512, 4, 4] to [BatchSize, 8192]
x = x.view(x.size(0), -1)
        # add selection head into forward
selectionhead = self.selectionhead(x)
x = self.classifier(x)
return (x, selectionhead)
def _initialize_weights(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
if m.bias is not None:
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.Linear):
m.weight.data.normal_(0, 0.01)
m.bias.data.zero_()
def make_layers(cfg, batch_norm=False):
layers = []
in_channels = 3
for v in cfg:
if v == 'M':
layers += [nn.MaxPool2d(kernel_size=2, stride=2)]
else:
conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1)
if batch_norm:
layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)]
else:
layers += [conv2d, nn.ReLU(inplace=True)]
in_channels = v
return nn.Sequential(*layers)
cfg = {
'A': [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M'],
}
def vgg11_bn(pretrained=False, **kwargs):
"""VGG 11-layer model (configuration "A") with batch normalization
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
if pretrained:
kwargs['init_weights'] = False
model = VGG(make_layers(cfg['A'], batch_norm=True), **kwargs)
if pretrained:
model.load_state_dict(model_zoo.load_url(model_urls['vgg11_bn']))
return model
### end of adding vgg with selective net
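# Quick shape check (a sketch: 128x128 inputs match the 8192-unit classifier
# head, since five 2x2 max-pools reduce 128 -> 4 and 512 * 4 * 4 = 8192):
#   model = vgg11_bn(num_classes=10)
#   logits, selection = model(torch.randn(2, 3, 128, 128))
#   # logits.shape    -> torch.Size([2, 10])
#   # selection.shape -> torch.Size([2, 1]), values in (0, 1) from the sigmoid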
class PyPandaVgg(TorchModel):
"""
    Implementation of PyTorch VGG with a SelectiveNet selection head
"""
def __init__(self, **knobs):
super().__init__(**knobs)
# overwrite train function
def train(self,
dataset_path: str,
shared_params: Optional[Params] = None,
**train_args):
"""
        Override BaseModel.train()
        Train the model with the given dataset
        parameters:
            dataset_path: path to the dataset
                type: str
**kwargs:
optional arguments
return:
nothing
"""
dataset = utils.dataset.load_dataset_of_image_files(
dataset_path,
min_image_size=32,
max_image_size=self._knobs.get("max_image_size"),
mode='RGB',
lazy_load=True)
self._normalize_mean, self._normalize_std = dataset.get_stat()
# self._normalize_mean = [0.48233507, 0.48233507, 0.48233507]
# self._normalize_std = [0.07271624, 0.07271624, 0.07271624]
self._num_classes = dataset.classes
print('num_class', dataset.classes)
# construct the model
self._model = self._create_model(scratch=self._knobs.get("scratch"),
num_classes=self._num_classes)
if self._knobs.get("enable_model_slicing"):
self._model = upgrade_dynamic_layers(
model=self._model,
num_groups=self._knobs.get("model_slicing_groups"),
sr_in_list=[0.5, 0.75, 1.0])
if self._knobs.get("enable_gm_prior_regularization"):
self._gm_optimizer = GMOptimizer()
for name, f in self._model.named_parameters():
self._gm_optimizer.gm_register(
name,
f.data.cpu().numpy(),
model_name="PyVGG",
hyperpara_list=[
self._knobs.get("gm_prior_regularization_a"),
self._knobs.get("gm_prior_regularization_b"),
self._knobs.get("gm_prior_regularization_alpha"),
],
gm_num=self._knobs.get("gm_prior_regularization_num"),
gm_lambda_ratio_value=self._knobs.get(
"gm_prior_regularization_lambda"),
uptfreq=[
self._knobs.get("gm_prior_regularization_upt_freq"),
self._knobs.get(
"gm_prior_regularization_param_upt_freq")
])
if self._knobs.get("enable_spl"):
self._spl = SPL()
train_dataset = TorchImageDataset(sa_dataset=dataset,
image_scale_size=128,
norm_mean=self._normalize_mean,
norm_std=self._normalize_std,
is_train=True)
train_dataloader = DataLoader(train_dataset,
batch_size=self._knobs.get("batch_size"),
shuffle=True)
#Setup Criterion
if self._num_classes == 2:
self.train_criterion = nn.CrossEntropyLoss(
) # type(torch.LongTensor)
# add selectionhead loss
self.selectionhead_criterion = nn.CrossEntropyLoss()
else:
self.train_criterion = nn.MultiLabelSoftMarginLoss(
) # type(torch.FloatTensor)
# add selectionhead loss
self.selectionhead_criterion = nn.MultiLabelSoftMarginLoss()
#Setup Optimizer
if self._knobs.get("optimizer") == "adam":
optimizer = optim.Adam(filter(lambda p: p.requires_grad,
self._model.parameters()),
lr=self._knobs.get("lr"),
weight_decay=self._knobs.get("weight_decay"))
elif self._knobs.get("optimizer") == "rmsprop":
optimizer = optim.RMSprop(
filter(lambda p: p.requires_grad, self._model.parameters()),
lr=self._knobs.get("lr"),
weight_decay=self._knobs.get("weight_decay"))
else:
raise NotImplementedError()
#Setup Learning Rate Scheduler
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer,
patience=1,
threshold=0.001,
factor=0.1)
if self._use_gpu:
self._model = self._model.cuda()
self._model.train()
if self._knobs.get("enable_model_slicing"):
sr_scheduler = create_sr_scheduler(
scheduler_type=self._knobs.get("model_slicing_scheduler_type"),
sr_rand_num=self._knobs.get("model_slicing_randnum"),
sr_list=[0.5, 0.75, 1.0],
sr_prob=None)
# SelectiveNet params
lamda = self._knobs.get("lamda")
selectionheadloss_weight = self._knobs.get("selectionheadloss_weight")
target_coverage = self._knobs.get("target_coverage")
for epoch in range(1, self._knobs.get("max_epochs") + 1):
print("Epoch {}/{}".format(epoch, self._knobs.get("max_epochs")))
batch_losses = []
for batch_idx, (raw_indices, traindata,
batch_classes) in enumerate(train_dataloader):
inputs, labels = self._transform_data(traindata,
batch_classes,
train=True)
optimizer.zero_grad()
if self._knobs.get("enable_model_slicing"):
for sr_idx in next(sr_scheduler):
self._model.update_sr_idx(sr_idx)
# add selection head outputs, selectionhead be a column
(outputs, selectionhead) = self._model(inputs)
predloss = self.train_criterion(outputs, labels)
                        # apply the Interior Point Method on the labels; same as
                        # selectionhead.view(-1, 1).repeat(1, self._num_classes).view(selectionhead.shape[0], -1) * labels
interior_point_of_labels = selectionhead * labels
auxiliaryhead = outputs
empirical_coverage = selectionhead.type(
torch.float64).mean()
selectionheadloss = self.selectionhead_criterion(
interior_point_of_labels, auxiliaryhead) + lamda * (
target_coverage -
empirical_coverage).clamp(min=0)**2
                        # .float() keeps the autograd graph intact; wrapping in
                        # torch.tensor() would detach the selection-head loss
                        selectionheadloss = selectionheadloss.float().cuda()
trainloss = selectionheadloss * selectionheadloss_weight + predloss * (
1 - selectionheadloss_weight)
trainloss.backward()
else:
# add selection head outputs, selectionhead be a column
(outputs, selectionhead) = self._model(inputs)
predloss = self.train_criterion(outputs, labels)
                    # apply the Interior Point Method on the labels; same as
                    # selectionhead.view(-1, 1).repeat(1, self._num_classes).view(selectionhead.shape[0], -1) * labels
interior_point_of_labels = selectionhead * labels
auxiliaryhead = outputs
empirical_coverage = selectionhead.type(
torch.float64).mean()
selectionheadloss = self.selectionhead_criterion(
interior_point_of_labels, auxiliaryhead) + lamda * (
target_coverage -
empirical_coverage).clamp(min=0)**2
                    # .float() keeps the autograd graph intact; wrapping in
                    # torch.tensor() would detach the selection-head loss
                    selectionheadloss = selectionheadloss.float().cuda()
trainloss = selectionheadloss * selectionheadloss_weight + predloss * (
1 - selectionheadloss_weight)
trainloss.backward()
if self._knobs.get("enable_gm_prior_regularization"):
for name, f in self._model.named_parameters():
self._gm_optimizer.apply_GM_regularizer_constraint(
labelnum=1,
trainnum=0,
epoch=epoch,
weight_decay=self._knobs.get("weight_decay"),
f=f,
name=name,
step=batch_idx)
if self._knobs.get("enable_spl"):
train_dataset.update_sample_score(
raw_indices,
trainloss.detach().cpu().numpy())
optimizer.step()
print("Epoch: {:d} Batch: {:d} Train Loss: {:.6f}".format(
epoch, batch_idx, trainloss.item()))
sys.stdout.flush()
batch_losses.append(trainloss.item())
train_loss = np.mean(batch_losses)
print("Training Loss: {:.6f}".format(train_loss))
if self._knobs.get("enable_spl"):
train_dataset.update_score_threshold(
threshold=self._spl.calculate_threshold_by_epoch(
epoch=epoch,
threshold_init=self._knobs.get("spl_threshold_init"),
mu=self._knobs.get("spl_mu")))
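    # The coverage term used above, in isolation (a sketch of just the penalty;
    # `selection` is the sigmoid head output and target_coverage is in (0, 1]):
    #
    #   empirical_coverage = selection.float().mean()
    #   penalty = lamda * torch.clamp(target_coverage - empirical_coverage, min=0) ** 2
    #
    # The model is penalized only when it covers fewer samples than the target;
    # over-coverage is free, so the selector learns to reject just enough inputs.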
def evaluate(self, dataset_path):
dataset = utils.dataset.load_dataset_of_image_files(
dataset_path,
min_image_size=32,
max_image_size=self._knobs.get("max_image_size"),
mode='RGB',
lazy_load=True)
torch_dataset = TorchImageDataset(sa_dataset=dataset,
image_scale_size=128,
norm_mean=self._normalize_mean,
norm_std=self._normalize_std,
is_train=False)
torch_dataloader = DataLoader(torch_dataset,
batch_size=self._knobs.get("batch_size"))
self._model.eval()
if self._knobs.get("enable_label_adaptation"):
self._label_drift_adapter = LabelDriftAdapter(
model=self._model, num_classes=self._num_classes)
batch_losses = []
outs = []
gts = []
# SelectiveNet params
lamda = self._knobs.get("lamda")
selectionheadloss_weight = self._knobs.get("selectionheadloss_weight")
target_coverage = self._knobs.get("target_coverage")
        print(
            'selectionhead: when the selection head output is lower than 0.5, the model is prone to a wrong prediction'
        )
        print(
            'Pred T/F: True means the model made the prediction correctly, False otherwise'
        )
with torch.no_grad():
for batch_idx, (raw_indices, batch_data,
batch_classes) in enumerate(torch_dataloader):
inputs, labels = self._transform_data(batch_data,
batch_classes,
train=True)
(outputs, selectionhead) = self._model(inputs)
predloss = self.train_criterion(outputs, labels)
# loss intergrated with SelectiveNet
interior_point_of_labels = selectionhead * labels
auxiliaryhead = outputs
empirical_coverage = selectionhead.type(torch.float64).mean()
selectionheadloss = self.selectionhead_criterion(
interior_point_of_labels, auxiliaryhead) + lamda * (
target_coverage - empirical_coverage).clamp(min=0)**2
                # consistent with train(): .float() avoids creating a detached copy
                selectionheadloss = selectionheadloss.float().cuda()
loss = selectionheadloss * selectionheadloss_weight + predloss * (
1 - selectionheadloss_weight)
batch_losses.append(loss.item())
outs.extend(torch.sigmoid(outputs).cpu().numpy())
gts.extend(labels.cpu().numpy())
if self._knobs.get("enable_label_adaptation"):
self._label_drift_adapter.accumulate_c(outputs, labels)
print("Batch: {:d}".format(batch_idx))
print('selectionhead: ', selectionhead[-1].cpu().numpy())
print('Pred T/F: ',
-0.5 < labels[-1].cpu().numpy() - outs[-1] < 0.5)
if self._knobs.get("enable_label_adaptation"):
self._label_drift_adapter.estimate_cinv()
valid_loss = np.mean(batch_losses)
print("Validation Loss: {:.6f}".format(valid_loss))
gts = np.array(gts)
outs = np.array(outs)
# in case that the ground truth has only one dimension
# i.e. is size of (N,) with integer elements of 0...C-1, where C is the number of classes
# the ground truth array has to be "one hot" encoded for evaluating the performance metric
if len(gts.shape) == 1:
gts = np.eye(self._num_classes)[gts].astype(np.int64)
pr_auc, roc_auc, f1, acc = self.get_peformance_metrics(
gts=np.array(gts), probabilities=np.array(outs))
return f1
def predict(self, queries: List[Any]) -> List[Any]:
"""
        Override BaseModel.predict()
        Make predictions for the given queries
        Parameters:
            queries: list of queries
        Return:
            outs: list of class scores, one per query
"""
images = utils.dataset.transform_images(queries,
image_size=128,
mode='RGB')
(images, _, _) = utils.dataset.normalize_images(images,
self._normalize_mean,
self._normalize_std)
ndarray_images, pil_images = utils.dataset.transform_images(
queries, image_size=128, mode='RGB')
if self._use_gpu:
self._model.cuda()
self._model.eval()
# images are size of (B, W, H, C)
with torch.no_grad():
try:
images = torch.FloatTensor(images).permute(0, 3, 1, 2).cuda()
except Exception:
images = torch.FloatTensor(images).permute(0, 3, 1, 2)
(outs, selectionhead) = self._model(images)
if self._knobs.get("enable_label_adaptation"):
outs = self._label_drift_adapter.adapt(outs)
else:
outs = torch.sigmoid(outs).cpu()
        result =
2.2.10.",
"GitHubIssues": [],
},
"2": {
"Description": [
"Description line 1 of release 2.2.2",
"Description line 2 of release 2.2.2",
],
"Title": "Release 2.2.2.",
"GitHubIssues": [],
},
"9": {
"Description": [
"Description line 1 of release 2.2.9",
"Description line 2 of release 2.2.9",
],
"Title": "Release 2.2.9.",
"GitHubIssues": [],
},
},
},
}
b_tls = Archiver(_PROJ_DESC, _PROJ_PATH)
class TestReleaseLogIt:
def test__init__default(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
t_releaselogit = releaselogit.ReleaseLogIt(
working_dir, p_parent_log_name=_PROJ_NAME
)
assert t_releaselogit.rel_notes == _TOML_CONTENTS_DEF_STRUCT
assert t_releaselogit.rel_list == [["0", "0", "0"]]
assert t_releaselogit.src_pth.exists()
assert t_releaselogit.success
pass
def test__init__existing(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXIST_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.rel_notes == _TOML_CONTENTS_EXIST_STRUCT
assert t_releaselogit.rel_list == [["0", "0", "0"], ["0", "0", "1"]]
assert t_releaselogit.src_pth.exists()
assert t_releaselogit.success
pass
def test__init__extended(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.rel_notes == _TOML_CONTENTS_EXTENDED_STRUCT
assert t_releaselogit.rel_list == [
["0", "0", "0"],
["0", "0", "9"],
["0", "0", "10"],
["0", "1", "0"],
["0", "1", "1"],
["0", "1", "2"],
["0", "2", "0"],
["0", "2", "1"],
["0", "2", "2"],
["1", "0", "0"],
["1", "0", "1"],
["1", "0", "2"],
["1", "1", "0"],
["1", "1", "1"],
["1", "1", "2"],
["1", "2", "0"],
["1", "2", "1"],
["1", "2", "2"],
["2", "0", "0"],
["2", "0", "1"],
["2", "0", "2"],
["2", "1", "0"],
["2", "1", "1"],
["2", "1", "2"],
["2", "2", "2"],
["2", "2", "9"],
["2", "2", "10"],
]
assert t_releaselogit.src_pth.exists()
assert t_releaselogit.success
pass
def test__iter__(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXIST_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert isinstance(t_releaselogit, releaselogit.ReleaseLogIt)
assert t_releaselogit.cur_pos == 0
pass
def test__next__(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXIST_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
elements = iter(t_releaselogit)
assert next(elements) == {
"0": {
"0": {
"0": {
"Description": [
"List all the changes to the project here.",
"Changes listed here will be in the release notes under the above heading.",
],
"Title": "Creation of the project",
"GitHubIssues": [],
}
}
}
}
assert next(elements) == {
"0": {
"0": {
"1": {
"Description": [
"Changes for 0.0.1 are listed here.",
"Add as many description lines as you like.",
],
"Title": "This is a new release.",
"GitHubIssues": [],
}
}
}
}
with pytest.raises(StopIteration):
assert next(elements)
def test__repr__extended(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert repr(t_releaselogit) == 'ReleaseLogIt(0,"0.0.0")'
pass
def test__str__extended(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert str(t_releaselogit) == "0.0.0"
pass
def test_add_release_note(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
release_note_100 = {
"1": {
"0": {
"0": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 1.0.0",
"GitHubIssues": [],
}
}
}
}
release_note_010 = {
"0": {
"1": {
"0": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.1.0",
"GitHubIssues": [],
}
}
}
}
release_note_001 = {
"0": {
"0": {
"1": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.0.1",
"GitHubIssues": [],
}
}
}
}
release_note_000 = {
"0": {
"0": {
"0": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.0.0",
"GitHubIssues": [],
}
}
}
}
release_note_default = {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.0.0",
"GitHubIssues": [],
}
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.add_release_note(release_note_100)
assert t_releaselogit.add_release_note(release_note_010)
assert t_releaselogit.add_release_note(release_note_001)
assert not t_releaselogit.add_release_note(release_note_000)
assert not t_releaselogit.add_release_note(release_note_default)
assert t_releaselogit.rel_notes == {
"0": {
"0": {
"0": {
"Description": [
"List all the changes to the project here.",
"Changes listed here will be in the release notes under the above heading.",
],
"Title": "Creation of the project",
"GitHubIssues": [],
},
"1": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.0.1",
"GitHubIssues": [],
},
},
"1": {
"0": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 0.1.0",
"GitHubIssues": [],
},
},
},
"1": {
"0": {
"0": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release change 1.0.0",
"GitHubIssues": [],
}
}
},
}
assert t_releaselogit.rel_list == [
["0", "0", "0"],
["0", "0", "1"],
["0", "1", "0"],
["1", "0", "0"],
]
assert t_releaselogit.rel_cntr == 4
pass
def test_check_release_note(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
release_note = {
"9": {
"9": {
"9": {
"Description": [
"Description line 1.",
"Description line 2.",
],
"Title": "Release 9.9.9",
}
}
}
}
assert t_releaselogit._check_release_note(release_note)
r_n = copy.deepcopy(release_note)
del r_n["9"]["9"]["9"]["Description"]
assert not t_releaselogit._check_release_note(r_n)
r_n = copy.deepcopy(release_note)
r_n["9"]["9"]["9"]["Description"] = "abc"
assert not t_releaselogit._check_release_note(r_n)
r_n = copy.deepcopy(release_note)
r_n["9"]["9"]["9"]["Description"] = []
assert not t_releaselogit._check_release_note(r_n)
r_n = copy.deepcopy(release_note)
r_n["9"]["9"]["9"]["Description"] = ["abc", 123]
assert not t_releaselogit._check_release_note(r_n)
r_n = copy.deepcopy(release_note)
del r_n["9"]["9"]["9"]["Title"]
assert not t_releaselogit._check_release_note(r_n)
r_n = copy.deepcopy(release_note)
r_n["9"]["9"]["9"]["Title"] = "Creation of the project"
assert not t_releaselogit._check_release_note(r_n)
pass
def test_do_example(self):
assert releaselogit.do_examples()
pass
def test_get_release_note_by_title(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.get_release_note_by_title("Release 1.1.1.") == {
"Title": "Release 1.1.1.",
"Description": [
"Description line 1 of release 1.1.1",
"Description line 2 of release 1.1.1",
],
"GitHubIssues": [],
}
assert t_releaselogit.get_release_note_by_title("Release 9.9.9.") is None
def test_get_release_note_by_version(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.get_release_note_by_version("1.1.1") == {
"Title": "Release 1.1.1.",
"Description": [
"Description line 1 of release 1.1.1",
"Description line 2 of release 1.1.1",
],
"GitHubIssues": [],
}
assert t_releaselogit.get_release_note_by_version("9.9.9") is None
pass
def test_get_release_titles(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXIST_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.get_release_titles() == [
"Creation of the project",
"This is a new release.",
]
assert t_releaselogit.success
pass
def test_has_title(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.has_title("Release 1.1.1.")
assert not t_releaselogit.has_title("Release 9.9.9.")
pass
def test_latest(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.latest() == {
"Title": "Release 2.2.10.",
"Description": [
"Description line 1 of release 2.2.10",
"Description line 2 of release 2.2.10",
],
"GitHubIssues": [],
}
pass
def test_latest_version(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.latest_version() == "2.2.10"
pass
def test_oldest(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.oldest() == {
"Title": "Release 0.0.0.",
"Description": [
"Description line 1 of release 0.0.0",
"Description line 2 of release 0.0.0",
],
"GitHubIssues": [],
}
pass
def test_sort(self, setup_env):
"""Assert class __init__"""
working_dir = setup_env
(working_dir / "release.toml").write_text(_TOML_CONTENTS_EXTENDED_CONTENTS)
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
assert t_releaselogit.rel_list == [
['0', '0', '0'],
['0', '0', '9'],
['0', '0', '10'],
['0', '1', '0'],
['0', '1', '1'],
['0', '1', '2'],
['0', '2', '0'],
['0', '2', '1'],
['0', '2', '2'],
['1', '0', '0'],
['1', '0', '1'],
['1', '0', '2'],
['1', '1', '0'],
['1', '1', '1'],
['1', '1', '2'],
['1', '2', '0'],
['1', '2', '1'],
['1', '2', '2'],
['2', '0', '0'],
['2', '0', '1'],
['2', '0', '2'],
['2', '1', '0'],
['2', '1', '1'],
['2', '1', '2'],
['2', '2', '2'],
['2', '2', '9'],
['2', '2', '10'],
]
pass
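        # Note: the ordering asserted above is numeric per component, not
        # lexicographic ('9' sorts before '10'). A minimal sketch of an
        # equivalent sort key:
        #   sorted(rel_list, key=lambda ver: tuple(int(p) for p in ver))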
def test_validate_release_notes(self, setup_env):
working_dir = setup_env
t_releaselogit = releaselogit.ReleaseLogIt(working_dir)
release_note = {
"0": {
"0": {
"1": {
"Description": [
"Changes for 0.0.1 are listed here.",
"Add as many description lines as you like.",
],
"Title": "Release 0.0.1",
},
"2": {
"Description": [
"Changes for 0.0.2 are listed here.",
"Add as many description lines as you like.",
],
"Title": "Release 0.0.2",
},
}
},
"1": {
"1": {
"1": {
"Description": [
"Changes for 1.1.1 are listed here.",
"Add as many description lines as you like.",
],
"Title": "Release 1.1.1",
},
"3": {
"Description": [
"Changes for 1.1.3 are listed here.",
"Add as many description lines as you like.",
],
"Title": "Release 1.1.3",
},
}
},
}
assert t_releaselogit._validate_release_log(release_note)
r_n = copy.deepcopy(release_note)
r_n["a"] = r_n["0"]
del r_n["0"]
assert not t_releaselogit._validate_release_log(r_n)
r_n = copy.deepcopy(release_note)
r_n[0] = r_n["0"].copy()
del r_n["0"]
assert not t_releaselogit._validate_release_log(r_n)
r_n = copy.deepcopy(release_note)
r_n["1"]["a"] = | |
##------------------------------------------------------------------------.
# Retrieve colormap and colorbar args
cmap_params, cbar_kwargs = _process_cmap_cbar_kwargs(xr_contour,
masked_arr.data,
**locals(),
_is_facetgrid=kwargs.pop("_is_facetgrid", False))
##------------------------------------------------------------------------.
    # If colors is a single color string, matplotlib draws negative contours dashed.
    # We would lose this feature if we passed cmap instead of colors.
if isinstance(colors, str):
cmap_params["cmap"] = None
##------------------------------------------------------------------------.
# Define axis type
if subplot_kws is None:
subplot_kws = dict()
ax = get_axis(figsize, size, aspect, ax, **subplot_kws)
##------------------------------------------------------------------------.
# Retrieve nodes coordinates
lons = darray[x].values
lats = darray[y].values
##------------------------------------------------------------------------.
# Plot contour
if plot_type=="tricontour":
primitive = ax.tricontour(lons, lats, masked_arr.data,
transform = transform,
# Color options
vmin=cmap_params['vmin'],
vmax=cmap_params['vmax'],
cmap=cmap_params['cmap'],
norm=cmap_params['norm'],
extend=cmap_params["extend"],
levels=cmap_params["levels"],
colors=colors,
alpha=alpha,
# Line options
linewidths = linewidths,
linestyles = linestyles,
antialiased = antialiased,
# Other args
**kwargs)
##------------------------------------------------------------------------.
    # Plot contour on a regular grid (regrid the unstructured nodes first)
if plot_type=="contour":
lons_new = np.linspace(-180, 180, 360*2)
lats_new = np.linspace(-90, 90, 180*2)
lons_2d, lats_2d = np.meshgrid(lons_new, lats_new)
data_new = griddata((lons, lats), masked_arr.data, (lons_2d, lats_2d), method='linear')
        # Add a cyclic longitude band at 360 degrees equal to the one at 0
data_new, lons_new = add_cyclic_point(data_new, coord=lons_new)
        # Plot contour
primitive = ax.contour(lons_new, lats_new, data_new,
transform = transform,
# Color options
vmin=cmap_params['vmin'],
vmax=cmap_params['vmax'],
cmap=cmap_params['cmap'],
norm=cmap_params['norm'],
extend=cmap_params["extend"],
levels=cmap_params["levels"],
alpha=alpha,
# Line options
linewidths = linewidths,
linestyles = linestyles,
antialiased = antialiased,
**kwargs)
# Set global axis
ax.set_global()
##------------------------------------------------------------------------.
    # Make the contour lines invisible.
if not add_contour:
plt.setp(primitive.collections, visible=False)
##------------------------------------------------------------------------.
# Add contour labels
if add_contour_labels:
ax.clabel(primitive,
colors=contour_labels_colors,
fontsize=contour_labels_fontsize,
manual=add_contour_labels_interactively,
inline=contour_labels_inline,
inline_spacing=contour_labels_inline_spacing,
fmt=contour_labels_format)
# Set global
ax.set_global()
##------------------------------------------------------------------------.
# Add labels
if add_labels:
ax.set_title(darray._title_for_slice())
##------------------------------------------------------------------------.
# Add colorbar
if add_colorbar:
if "label" not in cbar_kwargs and add_labels:
cbar_kwargs["label"] = label_from_attrs(darray)
cbar = _add_colorbar(primitive, ax, cbar_ax, cbar_kwargs, cmap_params)
else:
# Inform the user about keywords which aren't used
if cbar_ax is not None or cbar_kwargs:
raise ValueError("cbar_ax and cbar_kwargs can't be used with add_colorbar=False.")
##------------------------------------------------------------------------.
return primitive
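##------------------------------------------------------------------------.
# Usage sketch (hedged: the enclosing function's name is not visible in this
# fragment and is assumed to be `_contour`; cartopy is assumed for the transform):
#
#   import cartopy.crs as ccrs
#   ax = plt.axes(projection=ccrs.PlateCarree())
#   primitive = _contour(darray, ax=ax, transform=ccrs.PlateCarree(),
#                        plot_type="tricontour", add_contour_labels=True)
#
# "tricontour" draws directly on the unstructured node coordinates, while
# "contour" first regrids them to a regular half-degree lat/lon grid.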
def _contourf(darray,
x='lon',
y='lat',
transform=None,
# Facetgrids arguments
figsize=None,
size=None,
aspect=None,
ax=None,
row=None,
col=None,
col_wrap=None,
subplot_kws=None,
# Colors option
plot_type="contourf",
antialiased=True,
alpha=1,
colors=None,
levels=None,
cmap=None,
norm=None,
center=None,
vmin=None,
vmax=None,
robust=False,
extend='both',
# Colorbar options
add_colorbar=None,
cbar_ax=None,
cbar_kwargs=None,
# Axis options
add_labels=True,
**kwargs):
"""
Contourf plotting method for an unstructured mesh.
The DataArray must have the attribute 'nodes'.
Parameters
----------
darray : DataArray
Must be 2 dimensional, unless creating faceted plots.
figsize : tuple, optional
A tuple (width, height) of the figure in inches.
Mutually exclusive with ``size`` and ``ax``.
aspect : scalar, optional
Aspect ratio of plot, so that ``aspect * size`` gives the width in
inches. Only used if a ``size`` is provided.
size : scalar, optional
If provided, create a new figure for the plot with the given size.
Height (in inches) of each plot. See also: ``aspect``.
ax : matplotlib axes object, optional
Axis on which to plot this figure. By default, use the current axis.
Mutually exclusive with ``size`` and ``figsize``.
row : string, optional
If passed, make row faceted plots on this dimension name
col : string, optional
If passed, make column faceted plots on this dimension name
col_wrap : int, optional
Use together with ``col`` to wrap faceted plots
add_colorbar : bool, optional
Adds colorbar to axis
add_labels : bool, optional
Use xarray metadata to label axes
antialiased : bool, optional
Enable antialiasing, overriding the defaults. For filled contours, the default is True.
plot_type : str, optional
Whether to use the "contourf" or "tricontourf" function.
The default is "contourf".
vmin, vmax : floats, optional
Values to anchor the colormap, otherwise they are inferred from the
data and other keyword arguments. When a diverging dataset is inferred,
setting one of these values will fix the other by symmetry around
``center``. Setting both values prevents use of a diverging colormap.
If discrete levels are provided as an explicit list, both of these
values are ignored.
robust : bool, optional
If True and ``vmin`` or ``vmax`` are absent, the colormap range is
computed with 2nd and 98th percentiles instead of the extreme values.
levels : int or list-like object, optional
Split the colormap (cmap) into discrete color intervals. If an integer
is provided, "nice" levels are chosen based on the data range: this can
imply that the final number of levels is not exactly the expected one.
Setting ``vmin`` and/or ``vmax`` with ``levels=N`` is equivalent to
setting ``levels=np.linspace(vmin, vmax, N)``.
cmap : matplotlib colormap name or object, optional
The mapping from data values to color space.
If not provided, this will either be ``viridis``
(if the function infers a sequential dataset) or
``RdBu_r`` (if the function infers a diverging dataset).
Is mutually exclusive with the color argument.
When `Seaborn` is installed, ``cmap`` may also be a `seaborn`
color palette.
If ``cmap`` is a seaborn color palette, ``levels`` must not be specified.
center : float, optional
The value at which to center the colormap. Passing this value implies
use of a diverging colormap. Setting it to ``False`` prevents use of a
diverging colormap.
norm : ``matplotlib.colors.Normalize`` instance, optional
If the ``norm`` has vmin or vmax specified, the corresponding kwarg
must be None.
colors : discrete colors to plot, optional
A single color or a list of colors.
Is mutually exclusive with cmap argument.
Specification of ``levels`` argument is not mandatory.
alpha : float, default: 1
The alpha blending value, between 0 (transparent) and 1 (opaque).
extend : {"neither", "both", "min", "max"}, optional
Determines the contourf-coloring of values that are outside the levels range and
wheter to draw arrows extending the colorbar beyond its limits.
If 'neither' (the default), values outside the levels range are not colored.
If 'min', 'max' or 'both', color the values below, above or below and above the levels range.
Values below min(levels) and above max(levels) are mapped to the under/over
values of the Colormap.
Note that most colormaps do not have dedicated colors for these by default,
so that the over and under values are the edge values of the colormap.
You may want to set these values explicitly using Colormap.set_under and Colormap.set_over.
subplot_kws : dict, optional
Dictionary of keyword arguments for matplotlib subplots. Only used
for 2D and FacetGrid plots.
cbar_ax : matplotlib Axes, optional
Axes in which to draw the colorbar.
cbar_kwargs : dict, optional
Dictionary of keyword arguments to pass to the colorbar.
**kwargs : optional
Additional arguments to mpl.collections.PatchCollection
Returns
-------
artist :
The same type of primitive artist that the wrapped matplotlib
function returns
"""
# Check darray is a DataArray
if not isinstance(darray, xr.DataArray):
raise TypeError("Provide a DataArray to xsphere._contourf()")
# Checks plot_type
if not isinstance(plot_type, str):
raise TypeError("'plot_type' must be a string: either 'contourf' or 'tricontourf'")
if plot_type not in ['contourf','tricontourf']:
raise NotImplementedError("'plot_type' accept only 'contourf' or 'tricontourf' options.")
# Check ax
if ax is None and row is None and col is None:
raise ValueError("'ax' must be specified when not plotting a FacetGrids.")
# Check transform
if transform is None:
transform = ccrs.PlateCarree()
# Check x and y are coords of the xarray object
check_xy(darray, x=x, y=y)
##------------------------------------------------------------------------.
# Handle facetgrids first
if row or col:
if subplot_kws is None:
print("Tip: If you want to plot a map, you need to specify the projection \
using the argument subplot_kws={'projection': cartopy.crs.Robinson()}")
allargs = locals().copy()
del allargs["darray"]
allargs.update(allargs.pop("kwargs"))
return _easy_facetgrid(data=darray, plotfunc=_contourf, **allargs)
##------------------------------------------------------------------------.
# Initialize plot
plt = import_matplotlib_pyplot()
##------------------------------------------------------------------------.
# Pass the data as a masked ndarray too
masked_arr = darray.to_masked_array(copy=False)
##------------------------------------------------------------------------.
# Retrieve colormap and colorbar args
cmap_params, cbar_kwargs = _process_cmap_cbar_kwargs(xr_contourf,
masked_arr.data,
**locals(),
_is_facetgrid=kwargs.pop("_is_facetgrid", False))
##------------------------------------------------------------------------.
# If colors is a single color string, matplotlib draws negative contours dashed.
# We lose this feature if we pass cmap instead of colors
"""Integer optimization of livestock and water services."""
import os
import sys
import shutil
from osgeo import gdal
import re
import pandas
import numpy as np
import pygeoprocessing.geoprocessing
import marginal_value as mv
def integer_optim_Peru(objective_list, suf):
"""Calculate optimal intervention portfolio for a set of objective weights.
Parameters:
objective_list (list): list of objective objects containing info about
each objective used to construct the optimization problem including
objective weight, target, and whether it should be maximized or not
suf (string): results suffix that will be appended to the filename of
solution
Side effects:
creates or modifies a csv file containing the solution, the optimal
intervention set
creates or modifies a csv file containing scores, objective scores for
the optimal intervention set
Returns:
solution_filename, path to csv file where solution was saved
"""
intervention_list = [
'camelid_high', 'camelid_high_rot', 'camelid_low', 'camelid_low_rot',
'cow_high', 'cow_high_rot', 'cow_low', 'cow_low_rot',
'sheep_high', 'sheep_high_rot', 'sheep_low', 'sheep_low_rot']
pdict = {
u'outerdir': os.path.join(
_DATA_INPUT_DIR, 'animal_weights_literature_default_beta'),
u'rau_shp': os.path.join(_DATA_INPUT_DIR, 'canete_basin.shp'),
u'lulc': os.path.join(_DATA_INPUT_DIR, 'Final_cobertura_Canete.tif')}
intermediate_dir = os.path.join(pdict[u'outerdir'], 'intermediate')
if not os.path.exists(intermediate_dir):
os.makedirs(intermediate_dir)
pdict[u'intermediate'] = intermediate_dir
output_dir = os.path.join(pdict[u'outerdir'], 'output')
if not os.path.exists(output_dir):
os.makedirs(output_dir)
pdict[u'output'] = output_dir
rau = 0
csv_folder = os.path.join(pdict['outerdir'], 'marginal_value_csvs')
tables_folder = 'integer_optimizer_data'
def generate_ll_input_data():
tables_list = mv.margv_tables_from_csv(
pdict, objective_list, csv_folder, tables_folder)
# move marginal value tables just generated
for obj in objective_list:
int_folder = os.path.join(intermediate_dir, 'rau_%s' % obj.name)
if not os.path.exists(int_folder):
os.makedirs(int_folder)
copyfrom = os.path.join(
intermediate_dir, tables_folder, '%s_rau0.npy' % obj.name)
copyto = os.path.join(int_folder, '%s_rau0.npy' % obj.name)
shutil.copyfile(copyfrom, copyto)
copyto = os.path.join(int_folder, 'rau0.npy')
shutil.copyfile(copyfrom, copyto)
# normalize values
for objective in objective_list:
if objective.name == 'cost' or objective.name == 'Cost':
continue
folder = os.path.join(
pdict[u'intermediate'], 'rau_' + objective.name)
mv.normalize_values(folder, objective.maximize)
# rename normalized arrays
for obj in objective_list:
copyfrom = os.path.join(
pdict[u'intermediate'], 'rau_%s' % obj.name, 'norm',
'%s_rau0.npy' % obj.name)
copyto = os.path.join(
pdict[u'intermediate'], 'rau_%s' % obj.name, 'norm',
'rau0.npy')
shutil.move(copyfrom, copyto)
generate_ll_input_data()
ll_problem = {
'weights': {},
'targets': {},
'targettypes': {}}
for objective in objective_list:
ll_problem['weights'][objective.name] = objective.weight
if objective.l_target is not None:
ll_problem['targets'][objective.name] = objective.l_target
if objective.target_type is not None:
ll_problem['targettypes'][objective.name] = objective.target_type
ll_data = {'factornames': []}
for objective in objective_list:
ll_data['factornames'].append(objective.name)
if objective.name == 'cost' or objective.name == 'Cost':
rau_dir = os.path.join(
pdict[u'intermediate'], 'rau_' + objective.name)
else:
rau_dir = os.path.join(
pdict[u'intermediate'], 'rau_' + objective.name, 'norm')
file = os.path.join(rau_dir, 'rau' + str(rau) + '.npy')
if not os.path.isfile(file):
raise Exception("file %s not found" % file)
ll_data[objective.name] = np.load(file)
# get un-normalized objective data
ll_marg_data = ll_data.copy()
for objective in objective_list:
rau_dir = os.path.join(
pdict[u'intermediate'], 'rau_' + objective.name)
file = os.path.join(rau_dir, 'rau' + str(rau) + '.npy')
if not os.path.isfile(file):
raise Exception("file %s not found" % file)
ll_marg_data[objective.name] = np.load(file)
solution, scores = mv.integer_optimization(
ll_data, ll_problem, rau, marg_data=ll_marg_data,
tiebreaker_intervention=0)
solution_filename = os.path.join(
pdict['outerdir'], 'output', 'solution%s.csv' % suf)
scores_filename = os.path.join(
pdict['outerdir'], 'output', 'scores%s.csv' % suf)
solution_df = pandas.DataFrame({'solution': solution})
solution_df.to_csv(solution_filename)
scores_df = pandas.DataFrame(scores, index=[0])
scores_df.to_csv(scores_filename)
return solution_filename
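# Usage sketch (illustrative addition): `integer_optim_Peru` is driven as in
# `integer_optim_wrapper` below; e.g., for one equal-weight run (the suffix
# string is just a label used in the output filenames):
#
#   objective_list = [
#       mv.Objective('sdr', 1, None, None, None, maximize=False),
#       mv.Objective('swy', 1, None, None, None, maximize=True),
#       mv.Objective('livestock', 1, None, None, None, maximize=True)]
#   solution_csv = integer_optim_Peru(objective_list, 'livestock_1_sdr_1_swy_1')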
def translate_soln_to_lulc(solution_table, out_name):
"""Generate landcover raster from one optimal solution.
Parameters:
solution_table (string): path to csv table containing optimal
intervention portfolio according to one set of objective weights
out_name (string): file location where landcover raster should be
saved
Side effects:
creates or modifies a geotiff at the location `out_name`
Returns:
None
"""
hru_lulc_table = os.path.join(_DATA_INPUT_DIR, 'hru_definition_table.csv')
HRU_raster = os.path.join(_DATA_INPUT_DIR, 'HRU_all.tif')
HRU_codes = os.path.join(_DATA_INPUT_DIR, 'HRU_codes_11.8.16.csv')
sol_df = pandas.read_csv(solution_table)
HRU_df = pandas.read_csv(HRU_codes)
sol_joined = pandas.concat([sol_df, HRU_df], axis=1)
out_datatype = gdal.GDT_Int32
source_dataset = gdal.Open(HRU_raster)
band = source_dataset.GetRasterBand(1)
out_nodata = band.GetNoDataValue()
lulc_df = pandas.read_csv(hru_lulc_table)
merged_df = pandas.merge(sol_joined, lulc_df, on='HRU', how='outer')
merged_df['soln_int'] = merged_df['solution'].astype(float)
merged_df['sb_lu'] = merged_df['sb_lu'].astype(float)
merged_df.loc[
merged_df['solution'].notnull(), 'new_lulc'] = merged_df[
'sb_lu'] * 100 + merged_df['soln_int']
merged_df.loc[merged_df['solution'].isnull(), 'new_lulc'] = merged_df[
'sb_lu']
value_map = {row[3]: row[9] for row in merged_df.itertuples()}
pygeoprocessing.geoprocessing.reclassify_dataset_uri(
HRU_raster, value_map, out_name, out_datatype, out_nodata)
band = None
del source_dataset
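# Worked example of the reclassification above (illustrative): an HRU with
# sb_lu = 12 that receives intervention 3 is recoded as
# new_lulc = 12 * 100 + 3 = 1203, while an HRU with no solution keeps
# new_lulc = sb_lu = 12.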
def translate_solution(solution_csv, HRU_codes, HRU_raster, raster_out_uri):
"""Create raster showing optimal intervention for each HRU.
Parameters:
solution_csv (string): path to csv file containing the optimal
intervention portfolio according to one set of objective weights
HRU_codes (string): path to csv file containing HRU codes in a single
column. This file can be created by copying the 'zone' column from
one of the marginal value rasters (e.g.,
livestock_mv_by_HRU_9.29.16.csv).
HRU_raster (string): path to geotiff containing hydrologic response
units (HRUs) indexed to the integer codes in `HRU_codes`
raster_out_uri (string): path to location on disk where optimal
intervention geotiff should be saved
Side effects:
Creates or modifies the geotiff located at `raster_out_uri`
Returns:
None
"""
HRU_list = pygeoprocessing.geoprocessing.unique_raster_values_uri(
HRU_raster)
sol_df = pandas.read_csv(solution_csv)
HRU_df = pandas.read_csv(HRU_codes)
assert len(set(HRU_list) - set(HRU_df.HRU)) == 0, """Error: HRU raster does
not match HRU codes"""
sol_joined = pandas.concat([sol_df, HRU_df], axis=1)
out_datatype = 3
source_dataset = gdal.Open(HRU_raster)
band = source_dataset.GetRasterBand(1)
out_nodata = band.GetNoDataValue()
value_map = {row[3]: row[2] for row in sol_joined.itertuples()}
pygeoprocessing.geoprocessing.reclassify_dataset_uri(
HRU_raster, value_map, raster_out_uri, out_datatype, out_nodata)
band = None
del source_dataset
def integer_optim_wrapper():
"""Run integer optimization at series of objective weight combinations.
This function calls `integer_optim_Peru`, `translate_solution`, and
`translate_soln_to_lulc` to calculate optimal interventions for a series of
objective weights and generate tables and maps from that solution.
Side effects:
creates or modifies files located at hard-coded locations on disk
Returns:
None
"""
weight_range = [0, 0.1, 0.3, 0.5, 0.7, 0.9, 1]
for livestock_weight in weight_range:
for sdr_weight in weight_range:
for swy_weight in weight_range:
if (livestock_weight == sdr_weight and
sdr_weight == swy_weight):
if swy_weight < 1: # only run once with equal weights
continue
if (livestock_weight == 0 and sdr_weight == 0 and
swy_weight == 0):
continue
sed_obj = mv.Objective(
'sdr', sdr_weight, None, None, None, maximize=False)
swy_obj = mv.Objective(
'swy', swy_weight, None, None, None, maximize=True)
livestock_obj = mv.Objective(
'livestock', livestock_weight, None, None, None,
maximize=True)
objective_list = [sed_obj, swy_obj, livestock_obj]
suf = 'livestock_{}_sdr_{}_swy_{}'.format(
livestock_obj.weight, sed_obj.weight, swy_obj.weight)
raster_out_uri = os.path.join(
_DATA_INPUT_DIR, 'animal_weights_literature_default_beta',
'output', 'solution_map{}'.format(suf))
if not os.path.exists(raster_out_uri):
solution_csv = integer_optim_Peru(objective_list, suf)
HRU_codes = os.path.join(
_DATA_INPUT_DIR, 'HRU_codes_11.8.16.csv')
HRU_raster = os.path.join(
_DATA_INPUT_DIR, 'HRU_priority_FESC_RYEG.tif')
translate_solution(
solution_csv, HRU_codes, HRU_raster, raster_out_uri)
lulc_out_name = os.path.join(
_DATA_INPUT_DIR,
'animal_weights_literature_default_beta',
'output', 'solution_lulc{}.tif'.format(suf))
translate_soln_to_lulc(solution_csv, lulc_out_name)
def collate_scores(output_folder, save_as):
"""Collect scores into a file for plotting frontiers.
Parameters:
output_folder (string): path to local file folder containing a series
of csv tables, one for each objective weight combination,
indicating objective scores for the optimal solution
save_as (string): path to location where summary of objective scores
across objective weights should be saved
Side effects:
creates or modifies the csv table indicated by the path `save_as`
Returns:
None
"""
scores_files = [
f for f in os.listdir(output_folder) if f.startswith('scores')]
f = scores_files[0]
sol_df = pandas.read_csv(os.path.join(output_folder, f))
objectives = sol_df.columns.values.tolist()
objectives.remove('objective')
objectives.remove('Unnamed: 0')
sum_dict = {}
for obj in objectives:
sum_dict['{}_weight'.format(obj)] = []
sum_dict['{}_score'.format(obj)] = []
for f in scores_files:
sol_df = pandas.read_csv(os.path.join(output_folder, f))
for obj in objectives:
score = sol_df.get_value(0, obj)
try:
pattern = '{}_(.+?)_'.format(obj)
weight = re.search(pattern, f).group(1)
except AttributeError:  # re.search returned None: no trailing underscore
pattern = '{}_(.+?).csv'.format(obj)
weight = re.search(pattern, f).group(1)
sum_dict['{}_weight'.format(obj)].append(weight)
sum_dict['{}_score'.format(obj)].append(score)
sum_df = pandas.DataFrame(sum_dict)
sum_df.to_csv(save_as)
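# Filename convention assumed by the weight-parsing above (illustrative): for
# a file named 'scoreslivestock_0.5_sdr_0.3_swy_1.csv', the primary pattern
# 'sdr_(.+?)_' extracts '0.3', while the final objective ('swy', which has no
# trailing underscore) falls through to the fallback pattern 'swy_(.+?).csv',
# which extracts '1'.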
def collate_solutions(output_folder, objective_list):
"""Collect solutions from several portfolios.
Parameters:
output_folder (string): path to directory on disk that contains
solution summaries
objective_list (list): list of strings identifying the order of
objectives
Side effects:
creates or modifies the following files in `output_folder`:
'solution_summary.csv'
'solution_index.csv'
Returns:
None
"""
solutions_files = [
f for f in os.listdir(output_folder) if f.startswith('solution') and
f.endswith('.csv')]
df_list = []
weight_dict = {obj: [] for obj in objective_list}
weight_dict['soln_index'] = []
for idx in xrange(len(solutions_files)):
f = solutions_files[idx]
weight_dict['soln_index'].append(idx)
for obj in objective_list:
try:
pattern = '{}_(.+?)_'.format(obj)
weight = re.search(pattern, f).group(1)
except AttributeError:  # re.search returned None: no trailing underscore
pattern = '{}_(.+?).csv'.format(obj)
weight = re.search(pattern, f).group(1)
weight_dict[obj].append(weight)
df = pandas.read_csv(os.path.join(output_folder, f))
del df['Unnamed: 0']
df.columns = [idx]
df_list.append(df)
result_df = pandas.concat(df_list, axis=1)
weight_df = pandas.DataFrame(weight_dict)
result_df.to_csv(os.path.join(output_folder, "solution_summary.csv"),
index=False)
weight_df.to_csv(os.path.join(output_folder, "solution_index.csv"),
index=False)
def solution_agreement(HRU_codes, solution_summary, save_as):
"""Calculate agreement metrics from a set of portfolios.
Parameters:
HRU_codes (string): path to csv file containing HRU codes in a single
column of integers
solution_summary (string): path to csv file containing summary of
solutions among objective weight combinations
save_as (string): path to location on disk where agreement metrics
should be saved
Side effects:
creates or modifies the csv file indicated by `save_as`
Returns:
None
"""
sol_df = pandas.read_csv(solution_summary)
stat_df = pandas.read_csv(HRU_codes)
# proportion of runs where each HRU
"""show_vxlan.py
NXOS parser for the following show commands:
* show nve peers
* show nve interface <nve> detail
* show nve ethernet-segment
* show nve vni
* show nve vni summary
* show nve multisite dci-links
* show nve multisite fabric-links
* show l2route fl all
* show l2route evpn ethernet-segment all
* show l2route topology detail
* show l2route mac all detail
* show l2route mac-ip all detail
* show l2route summary
* show nve vni ingress-replication
* show l2route evpn mac-ip all
* show l2route evpn mac-ip evi <evi>
"""
# Python
import re
# Metaparser
from genie.metaparser import MetaParser
from genie.metaparser.util.schemaengine import Schema, Any, Optional
from genie.libs.parser.utils.common import Common
class ShowL2routeEvpnImetAllDetailSchema(MetaParser):
schema = {
'vni': {
Any(): {
'ip': {
Any(): {
'topo_id': int,
'vni': int,
'prod_type': str,
'ip_addr': str,
'eth_tag_id': int,
'pmsi_flags': int,
'flags': str,
'type': int,
'vni_label': int,
'tunnel_id': str,
'client_nfn': int,
}
}
}
}
}
class ShowL2routeEvpnImetAllDetail(ShowL2routeEvpnImetAllDetailSchema):
"""Parser for show l2route evpn imet all detail """
cli_command = 'show l2route evpn imet all detail'
def cli(self, output=None):
# execute command to get output
out = output if output else self.device.execute(self.cli_command)
# Topology ID VNI Prod IP Addr Eth Tag PMSI-Flags Flags Type Label(VNI) Tunnel ID NFN Bitmap
# ----------- ----------- ----- --------------------------------------- ------- ---------- ------- ---- ----------- --------------------------------------- ----------
# 201 20001 BGP 2001:db8:646:a2bb:0:abcd:1234:3 0 0 - 6 20001 2001:db8:646:a2bb:0:abcd:1234:3 32
# 201 20001 BGP 2001:db8:646:a2bb:0:abcd:5678:1 0 0 - 6 20001 2001:db8:646:a2bb:0:abcd:5678:1 32
p1 = re.compile(r'^(?P<topo_id>[\d]+) + (?P<vni>[\d]+)'
r' + (?P<prod_type>[\w]+) * (?P<ip_addr>[\w\:]+)'
r' + (?P<eth_tag_id>[\d]+) + + (?P<pmsi_flags>[\d]+)'
r' + (?P<flags>[\w-]) + (?P<type>[\d]+) + (?P<vni_label>[\d]+)'
r' + (?P<tunnel_id>[\w\:]+) + (?P<client_nfn>[\d]+)$')
result_dict = {}
for line in out.splitlines():
line = line.strip()
m = p1.match(line)
if m:
group = m.groupdict()
vni = int(group['vni'])
ip = group['ip_addr']
vni_dict = result_dict.setdefault('vni', {}).\
setdefault(vni, {}).\
setdefault('ip', {}).\
setdefault(ip, {})
vni_dict['topo_id'] = int(group['topo_id'])
vni_dict['vni'] = vni
vni_dict['prod_type'] = group['prod_type']
vni_dict['ip_addr'] = ip
vni_dict['eth_tag_id'] = int(group['eth_tag_id'])
vni_dict['pmsi_flags'] = int(group['pmsi_flags'])
vni_dict['flags'] = group['flags']
vni_dict['type'] = int(group['type'])
vni_dict['vni_label'] = int(group['vni_label'])
vni_dict['tunnel_id'] = group['tunnel_id']
vni_dict['client_nfn'] = int(group['client_nfn'])
continue
return result_dict
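# Usage sketch (illustrative addition): Genie parsers are normally run via a
# connected device, but captured CLI output can be fed in directly:
#
#   parser = ShowL2routeEvpnImetAllDetail(device=device)
#   parsed = parser.cli(output=captured_output)
#   topo = parsed['vni'][20001]['ip']['2001:db8:646:a2bb:0:abcd:1234:3']['topo_id']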
# ====================================================
# schema for show nve peers
# ====================================================
class ShowNvePeersSchema(MetaParser):
"""Schema for:
show nve peers"""
schema = {
Any(): {
'nve_name': str,
'peer_ip': {
Any(): {
'peer_state': str,
'learn_type': str,
'uptime': str,
'router_mac': str,
},
},
},
}
# ====================================================
# parser for show nve peers
# ====================================================
class ShowNvePeers(ShowNvePeersSchema):
"""Parser for :
show nve peers"""
cli_command = 'show nve peers'
exclude = [
'uptime']
def cli(self, output=None):
# execute command to get output
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
result_dict = {}
# Interface Peer-IP State LearnType Uptime Router-Mac
# nve1 192.168.16.1 Up CP 01:15:09 n/a
# nve1 192.168.106.1 Up CP 00:03:05 5e00.0002.0007
# nve1 2001:db8:646:a2bb:0:abcd:1234:3 Up CP 21:47:20 5254.0028.093a
# nve1 2001:db8:646:a2bb:0:abcd:1234:5 Up CP 21:47:20 5254.00dc.5da5
p1 = re.compile(r'^\s*(?P<nve_name>[\w\/]+) +(?P<peer_ip>[\w\.\:]+) +(?P<peer_state>[\w]+)'
r' +(?P<learn_type>[\w]+) +(?P<uptime>[\w\:]+) +(?P<router_mac>[\w\.\/]+)$')
for line in out.splitlines():
if line:
line = line.rstrip()
else:
continue
m = p1.match(line)
if m:
group = m.groupdict()
nve_name = group.pop('nve_name')
peer_ip = group.pop('peer_ip')
nve_dict = result_dict.setdefault(nve_name,{})
nve_dict.update({'nve_name': nve_name})
peer_dict = nve_dict.setdefault('peer_ip',{}).setdefault(peer_ip,{})
peer_dict.update({'learn_type': group.pop('learn_type')})
peer_dict.update({'uptime': group.pop('uptime')})
peer_dict.update({'router_mac': group.pop('router_mac')})
peer_dict.update({'peer_state': group.pop('peer_state').lower()})
continue
return result_dict
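# Example of the structure returned above (illustrative), for the sample line
# 'nve1  192.168.16.1  Up  CP  01:15:09  n/a':
#
#   {'nve1': {'nve_name': 'nve1',
#             'peer_ip': {'192.168.16.1': {'peer_state': 'up',
#                                          'learn_type': 'CP',
#                                          'uptime': '01:15:09',
#                                          'router_mac': 'n/a'}}}}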
# ====================================================
# schema for show nve vni summary
# ====================================================
class ShowNveVniSummarySchema(MetaParser):
"""Schema for:
show nve vni summary"""
schema = {
'vni': {
'summary': {
'cp_vni_count': int,
'cp_vni_up': int,
'cp_vni_down': int,
'dp_vni_count': int,
'dp_vni_up': int,
'dp_vni_down': int,
},
},
}
# ====================================================
# parser for show nve vni summary
# ====================================================
class ShowNveVniSummary(ShowNveVniSummarySchema):
"""Parser for :
show nve vni summary"""
cli_command = 'show nve vni summary'
def cli(self, output=None):
# execute command to get output
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
result_dict = {}
# Total CP VNIs: 21 [Up: 21, Down: 0]
# Total DP VNIs: 0 [Up: 0, Down: 0]
p1 = re.compile(
r'^\s*Total +CP +VNIs: +(?P<cp_vni_count>[\d]+) +\[Up: +(?P<cp_vni_up>[\d]+), +Down: +(?P<cp_vni_down>[\d]+)\]$')
p2 = re.compile(
r'^\s*Total +DP +VNIs: +(?P<dp_vni_count>[\d]+) +\[Up: +(?P<dp_vni_up>[\d]+), +Down: +(?P<dp_vni_down>[\d]+)\]$')
for line in out.splitlines():
if line:
line = line.rstrip()
else:
continue
m = p1.match(line)
if m:
group = m.groupdict()
vni_dict = result_dict.setdefault('vni',{}).setdefault('summary',{})
vni_dict.update({k:int(v) for k,v in group.items()})
continue
m = p2.match(line)
if m:
group = m.groupdict()
vni_dict.update({k: int(v) for k, v in group.items()})
continue
return result_dict
# ====================================================
# schema for show nve vni
# ====================================================
class ShowNveVniSchema(MetaParser):
"""Schema for:
show nve vni"""
schema ={
Any(): {
'vni': {
Any(): {
'vni': int,
'mcast': str,
'vni_state': str,
'mode': str,
'type': str,
'flags': str,
}
}
}
}
# ====================================================
# Parser for show nve vni
# ====================================================
class ShowNveVni(ShowNveVniSchema):
"""parser for:
show nve vni"""
cli_command = 'show nve vni'
def cli(self, output=None):
# execute command to get output
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
result_dict = {}
# Interface VNI Multicast-group State Mode Type [BD/VRF] Flags
# --------- -------- ----------------- ----- ---- ------------------ -----
# nve1 5001 172.16.31.10 Up CP L2 [1001]
# nve1 5001 192.168.0.1 Up CP L2 [1001] SA MS-IR
p1 = re.compile(r'^(?P<nve_name>[\w\/]+) +(?P<vni>[\d]+) +(?P<mcast>[\w\.\/]+) +'
r'(?P<vni_state>[\w]+) +(?P<mode>[\w]+) +(?P<type>\w+ +\[[\w\-]+\])'
r'(?: +(?P<flags>[\w\-\s]+))?$')
for line in out.splitlines():
line = line.strip()
m = p1.match(line)
if m:
group = m.groupdict()
nve_name = group.pop('nve_name')
vni = int(group.pop('vni'))
nve_dict = result_dict.setdefault(nve_name,{}).setdefault('vni',{}).setdefault(vni,{})
nve_dict.update({'vni': vni})
nve_dict.update({'mcast': group.pop('mcast').lower()})
nve_dict.update({'vni_state': group.pop('vni_state').lower()})
nve_dict.update({'mode': group.pop('mode')})
nve_dict.update({'type': group.pop('type')})
if group.get('flags'):
nve_dict.update({'flags': group.pop('flags')})
else:
nve_dict.update({'flags': ''})
continue
return result_dict
# ====================================================
# schema for show interface | i nve
# ====================================================
class ShowNveInterfaceSchema(MetaParser):
"""Schema for:
show nve interface | i nve"""
schema = {
'nves':
{Any():
{'nve_name': str,
'nve_state': str,
},
},
}
#=======================================
# show interface | i nve
#=======================================
class ShowNveInterface(ShowNveInterfaceSchema):
"""Parser for show interface | i nve"""
cli_command = 'show interface | i nve'
def cli(self, output=None):
# execute command to get output
if output is None:
out = self.device.execute(self.cli_command)
else:
out = output
# Init vars
result_dict = {}
# nve1 is down (other)
p1 = re.compile(r'^\s*nve(?P<nve>(\d+)) +is +(?P<nve_state>[\w]+)( +(?P<other>[\w\(\)]+))?$')
for line in out.splitlines():
line = line.rstrip()
m = p1.match(line)
if m:
group = m.groupdict()
nve_name = "{}{}".format('nve',group.pop('nve'))
nve_dict = result_dict.setdefault('nves', {}).setdefault(nve_name,{})
nve_dict.update({'nve_name': nve_name})
nve_dict.update({'nve_state': group.pop('nve_state').lower()})
continue
return result_dict
# ====================================================
# schema for show nve interface <nve> detail
# ====================================================
class ShowNveInterfaceDetailSchema(MetaParser):
"""Schema for:
show nve interface <nve> detail"""
schema ={
Any(): {
'nve_name': str,
Optional('if_state'): str,
Optional('encap_type'): str,
Optional('vpc_capability'): str,
Optional('local_rmac'): str,
Optional('host_reach_mode'): str,
Optional('source_if'): str,
Optional('primary_ip'): str,
Optional('secondary_ip'): str,
Optional('src_if_state'): str,
Optional('ir_cap_mode'): str,
Optional('adv_vmac'): bool,
Optional('nve_flags'): str,
Optional('nve_if_handle'): int,
Optional('src_if_holddown_tm'): int,
Optional('src_if_holdup_tm'): int,
Optional('src_if_holddown_left'): int,
Optional('multisite_convergence_time'): int,
Optional('multisite_convergence_time_left'): int,
Optional('vip_rmac'): str,
Optional('vip_rmac_ro'): str,
Optional('sm_state'): str,
Optional('peer_forwarding_mode'): bool,
Optional('dwn_strm_vni_cfg_mode'): str,
Optional('src_intf_last_reinit_notify_type'): str,
Optional('mcast_src_intf_last_reinit_notify_type'): str,
Optional('multi_src_intf_last_reinit_notify_type'): str,
Optional('multisite_bgw_if'): str,
Optional('multisite_bgw_if_ip'): str,
Optional('multisite_bgw_if_admin_state'): str,
Optional('multisite_bgw_if_oper_state'): str,
Optional('multisite_bgw_if_oper_state_down_reason'): str,
}
}
# ====================================================
# schema for show nve interface <nve> detail
# ====================================================
class ShowNveInterfaceDetail(ShowNveInterfaceDetailSchema):
"""parser for:
show nve interface <nve> detail"""
cli_command = 'show nve interface {interface} detail'
def cli(self, interface=""):
nve_list = []
if interface:
nve_list.append(interface)
if not interface:
cmd1 = 'show interface | i nve'
out1 = self.device.execute(cmd1)
# Init vars
# nve1 is down (other)
p1 = re.compile(r'^\s*nve(?P<nve>(\d+)) +is +(?P<nve_state>[\w]+)( +(?P<other>[\w\(\)]+))?$')
for line in out1.splitlines():
line = line.rstrip()
m = p1.match(line)
if m:
group = m.groupdict()
nve_name = '{}{}'.format('nve', group.get('nve'))
nve_list.append(nve_name)
continue
result_dict = {}
# Interface: nve1, State: Up, encapsulation: VXLAN
p1 = re.compile(r'^\s*Interface: +(?P<nve_name>[\w\/]+), +State: +(?P<state>[\w]+),'
r' +encapsulation: +(?P<encapsulation>[\w]+)$')
p2 = re.compile(r'^\s*VPC Capability: +(?P<vpc_capability>[\w\s\-\[\]]+)$')
p3 = re.compile(r'^\s*Local Router MAC: +(?P<local_router_mac>[\w\.]+)$')
p4 = re.compile(r'^\s*Host Learning Mode: +(?P<host_learning_mode>[\w\-]+)$')
p5 = re.compile(r'^\s*Source-Interface: +(?P<source_if>[\w\/]+)'
r' +\(primary: +(?P<primary_ip>[\w\.]+), +secondary: +(?P<secondary_ip>[\w\.]+)\)$')
# Source-Interface: loopback1 (primary: 2001:db8:646:a2bb:0:abcd:1234:4)
# Anycast-Interface: loopback2 (secondary: 2001:db8:646:a2bb:0:abcd:5678:5)
p5_1 = re.compile(r'^\s*Source-Interface: +(?P<source_if>[\w\/]+) +\(primary: +(?P<primary_ip>[\w\.\:]+)\)')
p5_2 = re.compile(r'^\s*Anycast-Interface: +(?P<anycast_if>[\w\/]+) +\(secondary: +(?P<secondary_ip>[\w\.\:]+)\)')
p6 = re.compile(r'^\s*Source +Interface +State: +(?P<source_state>[\w]+)$')
p7 = re.compile(r'^\s*IR +Capability +Mode: +(?P<mode>[\w]+)$')
p8 = re.compile(r'^\s*Virtual +RMAC +Advertisement: +(?P<adv_vmac>[\w]+)$')
p9 = re.compile(r'^\s*NVE +Flags:( +(?P<flags>[\w]+))?$')
p10 = re.compile(r'^\s*Interface +Handle: +(?P<intf_handle>[\w]+)$')
p11 = re.compile(r'^\s*Source +Interface +hold-down-time: +(?P<hold_down_time>[\d]+)$')
p12 = re.compile(r'^\s*Source +Interface +hold-up-time: +(?P<hold_up_time>[\d]+)$')
p13 = re.compile(r'^\s*Remaining +hold-down +time: +(?P<hold_time_left>[\d]+) +seconds$')
p14 = re.compile(r'^\s*Virtual +Router +MAC: +(?P<v_router_mac>[\w\.]+)$')
p15 = re.compile(r'^\s*Virtual +Router +MAC +Re\-origination: +(?P<v_router_mac_re>[\w\.]+)$')
p16 = re.compile(r'^\s*Interface +state: +(?P<intf_state>[\w\-]+)$')
p17 = re.compile(r'^\s*unknown-peer-forwarding: +(?P<peer_forwarding>[\w]+)$')
p18 = re.compile(r'^\s*down-stream +vni +config +mode: +(?P<vni_config_mode>[\w\/]+)$')
p19 = re.compile(r'^\s*Nve +Src +node +last +notif +sent:
#!/usr/bin/python3
"""
Module for querying weather forecast data from OpenWeatherMap and storing it in a database
"""
import json
import requests
import datetime
# Set up logging
import logging
import logging_plus
logger = logging_plus.getLogger(__name__)
logger.addHandler(logging.NullHandler())
# Defaults
# Current / hourly forecast
cfc = {
"timestamp" : None,
"temperature" : None,
"humidity" : None,
"pressure" : None,
"clouds" : None,
"uvi" : None,
"visibility" : None,
"windspeed" : None,
"winddir" : None,
"rain" : None,
"snow" : None,
"description" : None,
"icon" : None,
"alerts" : 0
}
# Daily forecast
dfc = {
"date": None,
"sunrise": None,
"sunset": None,
"temperature_m": None,
"temperature_d": None,
"temperature_e": None,
"temperature_n": None,
"temperature_min": None,
"temperature_max": None,
"humidity" : None,
"pressure" : None,
"windspeed" : None,
"winddir" : None,
"clouds" : None,
"uvi" : None,
"pop": None,
"rain" : None,
"snow" : None,
"description" : None,
"icon" : None,
"alerts" : 0
}
def getForecast(url, payload):
"""
Get weather forecast data from the OpenWeatherMap web service
"""
fcr = requests.get(url, params=payload)
if fcr.status_code != requests.codes.ok:
fcr.raise_for_status()
try:
fcrj = fcr.json()
except Exception as e:
logger.error("Error parsing response: %s", e)
fcrj = None
logger.error("Request URL : %s", url)
logger.error("Request payload: %s", payload)
logger.error("Response : %s", fcr.text)
return fcrj
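# Usage sketch (illustrative addition): the field names consumed by
# mapForecast() below ('current', 'hourly', 'daily', 'alerts') correspond to
# the OpenWeatherMap One Call response; the exact URL and parameters here are
# assumptions, not taken from this module:
#
#   url = "https://api.openweathermap.org/data/2.5/onecall"
#   payload = {"lat": 52.52, "lon": 13.41, "units": "metric",
#              "appid": "YOUR_API_KEY"}
#   fc = getForecast(url, payload)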
def mapForecast(fc, ts):
"""
Map forecast data to standard structure
"""
global cfc
# Map current forecast
curfc = cfc.copy()
curfc["timestamp"] = ts
curfc["temperature"] = fc["current"]["temp"]
curfc["humidity"] = fc["current"]["humidity"]
curfc["pressure"] = fc["current"]["pressure"]
curfc["clouds"] = fc["current"]["clouds"]
curfc["uvi"] = fc["current"]["uvi"]
curfc["visibility"] = fc["current"]["visibility"]
curfc["windspeed"] = fc["current"]["wind_speed"]
curfc["winddir"] = fc["current"]["wind_deg"]
if "rain" in fc["current"]:
curfc["rain"] = fc["current"]["rain"]["1h"]
if "snow" in fc["current"]:
curfc["snow"] = fc["current"]["snow"]["1h"]
if len(fc["current"]["weather"]) > 0:
w = fc["current"]["weather"][0]
curfc["description"] = w["description"]
curfc["icon"] = w["icon"]
curfc["alerts"] = getAlerts(fc, fc["current"]["dt"])
# Map hourly forecast
hourlyfc = list()
if len(fc["hourly"]) > 0:
for i in range(0, len(fc["hourly"])):
hourfc = cfc.copy()
hfc = fc["hourly"][i]
hourfc["timestamp"] = datetime.datetime.fromtimestamp(hfc["dt"]).strftime("%Y-%m-%d %H:%M:%S")
hourfc["temperature"] = hfc["temp"]
hourfc["humidity"] = hfc["humidity"]
hourfc["pressure"] = hfc["pressure"]
hourfc["clouds"] = hfc["clouds"]
hourfc["uvi"] = hfc["uvi"]
hourfc["visibility"] = hfc["visibility"]
hourfc["windspeed"] = hfc["wind_speed"]
hourfc["winddir"] = hfc["wind_deg"]
if "rain" in hfc:
hourfc["rain"] = hfc["rain"]["1h"]
if "snow" in hfc:
hourfc["snow"] = hfc["snow"]["1h"]
if len(hfc["weather"]) > 0:
w = hfc["weather"][0]
hourfc["description"] = w["description"]
hourfc["icon"] = w["icon"]
hourfc["alerts"] = getAlerts(fc, hfc["dt"])
hourlyfc.append(hourfc)
# Map daily forecast
dailyfc = list()
if len(fc["daily"]) > 0:
for i in range(0, len(fc["daily"])):
dayfc = dfc.copy()
dyfc = fc["daily"][i]
dayfc["date"] = datetime.datetime.fromtimestamp(dyfc["dt"]).strftime("%Y-%m-%d")
dayfc["sunrise"] = datetime.datetime.fromtimestamp(dyfc["sunrise"]).strftime("%H:%M:%S")
dayfc["sunset"] = datetime.datetime.fromtimestamp(dyfc["sunset"]).strftime("%H:%M:%S")
dyfct = dyfc["temp"]
dayfc["temperature_m"] = dyfct["morn"]
dayfc["temperature_d"] = dyfct["day"]
dayfc["temperature_e"] = dyfct["eve"]
dayfc["temperature_n"] = dyfct["night"]
dayfc["temperature_min"] = dyfct["min"]
dayfc["temperature_max"] = dyfct["max"]
dayfc["humidity"] = dyfc["humidity"]
dayfc["pressure"] = dyfc["pressure"]
dayfc["windspeed"] = dyfc["wind_speed"]
dayfc["winddir"] = dyfc["wind_deg"]
dayfc["clouds"] = dyfc["clouds"]
dayfc["uvi"] = dyfc["uvi"]
dayfc["pop"] = dyfc["pop"]
if "rain" in dyfc:
dayfc["rain"] = dyfc["rain"]
if "snow" in dyfc:
dayfc["snow"] = dyfc["snow"]
if len(dyfc["weather"]) > 0:
w = dyfc["weather"][0]
dayfc["description"] = w["description"]
dayfc["icon"] = w["icon"]
dayfc["alerts"] = getAlerts(fc, dyfc["dt"])
dailyfc.append(dayfc)
return [curfc, hourlyfc, dailyfc]
def getAlerts(fc, dt):
"""
Count the number of alerts for a given date/time (dt)
"""
res = 0
if "alerts" in fc:
if len(fc["alerts"]) > 0:
for i in range(0, len(fc["alerts"])):
alert = fc["alerts"][i]
if dt >= alert["start"] and dt <= alert["end"]:
res = res + 1
return res
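# End-to-end sketch (illustrative addition) combining the helpers above:
#
#   fc = getForecast(url, payload)  # raw JSON dict from the service
#   ts = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
#   current, hourly, daily = mapForecast(fc, ts)
#   n_alerts = getAlerts(fc, fc["current"]["dt"])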
def forecastToDb(fcData, cfg, curTs, curDate, dbCon, dbCur, servRun):
"""
Store forecast data in database
"""
#
# Store current and hourly forecast
#
tblHourly = cfg["forecast"]["forecastTables"]["hourlyForecast"]
# Clean up current / hourly forecast
# Retain forecast for the next fcRetainHours hours
fcRetainHours = cfg["forecast"]["forecastRetain"]
t_lastTs = getLatestForecast(tblHourly, dbCon, dbCur, servRun)
if t_lastTs:
t_lastTs = t_lastTs + datetime.timedelta(minutes=1)
t_curTs = datetime.datetime.strptime(curTs, "%Y-%m-%d %H:%M:%S")
t_limTs = t_curTs + datetime.timedelta(hours=fcRetainHours)
if t_lastTs < t_limTs:
t_limTs = t_lastTs
limTs = t_limTs.strftime("%Y-%m-%d %H:%M:%S")
if limTs < curTs:
limTs = curTs
else:
limTs = curTs
forecastToDbHourlyCleanup(tblHourly, limTs, dbCon, dbCur, servRun)
# Insert Current forecast
curfc = fcData[0]
forecastToDbCurrent(curfc, tblHourly, dbCon, dbCur, servRun)
# Insert hourly forecast
hourfc = fcData[1]
if len(hourfc) > 0:
for i in range(0, len(hourfc)):
curfc = hourfc[i]
if curfc["timestamp"] >= limTs:
forecastToDbHourly(curfc, tblHourly, dbCon, dbCur, servRun)
elif (curfc["timestamp"] >= curTs) and (curTs < limTs):
forecastToDbCurrent(curfc, tblHourly, dbCon, dbCur, servRun)
#
# Store daily forecast
#
tblDaily = cfg["forecast"]["forecastTables"]["dailyForecast"]
# Clean up daily forecast
forecastToDbDailyCleanup(tblDaily, curDate, dbCon, dbCur, servRun)
# Insert daily forecast
dayfc = fcData[2]
if len(dayfc) > 0:
for i in range(0, len(dayfc)):
curfc = dayfc[i]
if curfc["date"] >= curDate:
forecastToDbDaily(curfc, tblDaily, dbCon, dbCur, servRun)
def getLatestForecast(tbl, dbCon, dbCur, servRun):
"""
Return the timestamp for the latest forecast.
"""
# Prepare statement
stmt = "SELECT timestamp FROM " + tbl + " ORDER BY TIMESTAMP DESC LIMIT 0,1"
logger.debug(stmt)
dbCur.execute(stmt)
res = None
for row in dbCur:
res = row[0]
return res
def forecastToDbHourlyCleanup(tbl, ts, dbCon, dbCur, servRun):
"""
Remove entries for later timestamps.
This is necessary in order to allow later insertion of forecast entries
"""
# Prepare statement
stmt = "DELETE FROM " + tbl + " WHERE timestamp >= '" + ts + "'"
logger.debug(stmt)
dbCur.execute(stmt)
dbCon.commit()
def forecastToDbDailyCleanup(tbl, curDate, dbCon, dbCur, servRun):
"""
Remove entries for later timestamps.
This is necessary in order to allow later insertion of forecast entries
"""
# Prepare statement
stmt = "DELETE FROM " + tbl + " WHERE date >= '" + curDate + "'"
logger.debug(stmt)
dbCur.execute(stmt)
dbCon.commit()
def forecastToDbCurrent(fc, tbl, dbCon, dbCur, servRun):
"""
Store current forecast data in database
"""
global logger
# Prepare statement
ins1 = "INSERT INTO " + tbl + " (timestamp"
ins2 = "VALUES ('" + fc["timestamp"] + "'"
ins3 = " ON DUPLICATE KEY UPDATE "
if fc["temperature"] != None:
ins1 = ins1 + ", temperature"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["temperature"])
ins3 = ins3 + "temperature="
ins3 = ins3 + "{:+.1f}".format(fc["temperature"])
if fc["humidity"] != None:
ins1 = ins1 + ", humidity"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["humidity"])
ins3 = ins3 + ", humidity="
ins3 = ins3 + "{:+.1f}".format(fc["humidity"])
if fc["pressure"] != None:
ins1 = ins1 + ", pressure"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["pressure"])
ins3 = ins3 + ", pressure="
ins3 = ins3 + "{:+.1f}".format(fc["pressure"])
if fc["clouds"] != None:
ins1 = ins1 + ", clouds"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["clouds"])
ins3 = ins3 + ", clouds="
ins3 = ins3 + "{:+.1f}".format(fc["clouds"])
if fc["uvi"] != None:
ins1 = ins1 + ", uvi"
ins2 = ins2 + ", " + "{:+.2f}".format(fc["uvi"])
ins3 = ins3 + ", uvi="
ins3 = ins3 + "{:+.2f}".format(fc["uvi"])
if fc["visibility"] != None:
ins1 = ins1 + ", visibility"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["visibility"])
ins3 = ins3 + ", visibility="
ins3 = ins3 + "{:+.1f}".format(fc["visibility"])
if fc["windspeed"] != None:
ins1 = ins1 + ", windspeed"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["windspeed"])
ins3 = ins3 + ", windspeed="
ins3 = ins3 + "{:+.1f}".format(fc["windspeed"])
if fc["winddir"] != None:
ins1 = ins1 + ", winddir"
ins2 = ins2 + ", " + "{:+.1f}".format(fc["winddir"])
ins3 = ins3 + ", winddir="
ins3 = ins3 + "{:+.1f}".format(fc["winddir"])
if fc["rain"] != None:
ins1 = ins1 + ", rain"
ins2 = ins2 + ", " + "{:+.2f}".format(fc["rain"])
ins3 = ins3 + ", rain="
ins3 = ins3 + "{:+.2f}".format(fc["rain"])
if fc["snow"] != None:
ins1 = ins1 + ", snow"
ins2 = ins2 + ", " + "{:+.2f}".format(fc["snow"])
ins3 = ins3 + ", snow="
ins3 = ins3 + "{:+.2f}".format(fc["snow"])
if fc["description"] != None:
ins1 = ins1 + ", description"
ins2 = ins2 + ", '" + fc["description"] + "'"
ins3 = ins3 + ", description="
ins3 = ins3 + "'" + fc["description"] + "'"
if fc["icon"] != None:
ins1 = ins1 + ", icon"
ins2 = ins2 + ", '" + fc["icon"] + "'"
ins3 = ins3 + ", icon="
ins3 = ins3 + "'" + fc["icon"] + "'"
if fc["alerts"] != None:
ins1 = ins1 + ", alerts"
ins2 = ins2 + ", " + "{}".format(fc["alerts"])
ins3 = ins3 + ", alerts="
ins3 = ins3 + "{}".format(fc["alerts"])
tnow = datetime.datetime.now()
ins1 = ins1 + ", time_cre"
ins2 = ins2 + ", '" + tnow.strftime("%Y-%m-%d | |
object.
Fields:
filter: The standard list filter.
name: The name of the operation collection.
pageSize: The standard list page size.
pageToken: The standard list page token.
"""
filter = _messages.StringField(1)
name = _messages.StringField(2, required=True)
pageSize = _messages.IntegerField(3, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(4)
class CloudbuildProjectsBuildsCancelRequest(_messages.Message):
"""A CloudbuildProjectsBuildsCancelRequest object.
Fields:
cancelBuildRequest: A CancelBuildRequest resource to be passed as the
request body.
id: ID of the build.
projectId: ID of the project.
"""
cancelBuildRequest = _messages.MessageField('CancelBuildRequest', 1)
id = _messages.StringField(2, required=True)
projectId = _messages.StringField(3, required=True)
class CloudbuildProjectsBuildsCreateRequest(_messages.Message):
"""A CloudbuildProjectsBuildsCreateRequest object.
Fields:
build: A Build resource to be passed as the request body.
projectId: ID of the project.
"""
build = _messages.MessageField('Build', 1)
projectId = _messages.StringField(2, required=True)
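# Construction sketch (illustrative addition): apitools message classes are
# instantiated with keyword arguments matching their fields, e.g.
# (the project ID is a placeholder):
#
#   request = CloudbuildProjectsBuildsCreateRequest(
#       build=Build(),  # Build is defined elsewhere in this generated module
#       projectId='my-project')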
class CloudbuildProjectsBuildsGetRequest(_messages.Message):
"""A CloudbuildProjectsBuildsGetRequest object.
Fields:
id: ID of the build.
projectId: ID of the project.
"""
id = _messages.StringField(1, required=True)
projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsBuildsListRequest(_messages.Message):
"""A CloudbuildProjectsBuildsListRequest object.
Fields:
filter: The raw filter text to constrain the results.
pageSize: Number of results to return in the list.
pageToken: Token to provide to skip to a particular spot in the list.
projectId: ID of the project.
"""
filter = _messages.StringField(1)
pageSize = _messages.IntegerField(2, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(3)
projectId = _messages.StringField(4, required=True)
class CloudbuildProjectsTriggersCreateRequest(_messages.Message):
"""A CloudbuildProjectsTriggersCreateRequest object.
Fields:
buildTrigger: A BuildTrigger resource to be passed as the request body.
projectId: ID of the project for which to configure automatic builds.
"""
buildTrigger = _messages.MessageField('BuildTrigger', 1)
projectId = _messages.StringField(2, required=True)
class CloudbuildProjectsTriggersDeleteRequest(_messages.Message):
"""A CloudbuildProjectsTriggersDeleteRequest object.
Fields:
projectId: ID of the project that owns the trigger.
triggerId: ID of the BuildTrigger to delete.
"""
projectId = _messages.StringField(1, required=True)
triggerId = _messages.StringField(2, required=True)
class CloudbuildProjectsTriggersGetRequest(_messages.Message):
"""A CloudbuildProjectsTriggersGetRequest object.
Fields:
projectId: ID of the project that owns the trigger.
triggerId: ID of the BuildTrigger to get.
"""
projectId = _messages.StringField(1, required=True)
triggerId = _messages.StringField(2, required=True)
class CloudbuildProjectsTriggersListRequest(_messages.Message):
"""A CloudbuildProjectsTriggersListRequest object.
Fields:
projectId: ID of the project for which to list BuildTriggers.
"""
projectId = _messages.StringField(1, required=True)
class CloudbuildProjectsTriggersPatchRequest(_messages.Message):
"""A CloudbuildProjectsTriggersPatchRequest object.
Fields:
buildTrigger: A BuildTrigger resource to be passed as the request body.
projectId: ID of the project that owns the trigger.
triggerId: ID of the BuildTrigger to update.
"""
buildTrigger = _messages.MessageField('BuildTrigger', 1)
projectId = _messages.StringField(2, required=True)
triggerId = _messages.StringField(3, required=True)
class Empty(_messages.Message):
"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo {
rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
JSON representation for `Empty` is empty JSON object `{}`.
"""
class FileHashes(_messages.Message):
"""Container message for hashes of byte content of files, used in
SourceProvenance messages to verify integrity of source input to the build.
Fields:
fileHash: Collection of file hashes.
"""
fileHash = _messages.MessageField('Hash', 1, repeated=True)
class Hash(_messages.Message):
"""Container message for hash values.
Enums:
TypeValueValuesEnum: The type of hash that was performed.
Fields:
type: The type of hash that was performed.
value: The hash value.
"""
class TypeValueValuesEnum(_messages.Enum):
"""The type of hash that was performed.
Values:
NONE: No hash requested.
SHA256: Use a sha256 hash.
"""
NONE = 0
SHA256 = 1
type = _messages.EnumField('TypeValueValuesEnum', 1)
value = _messages.BytesField(2)
class ListBuildTriggersResponse(_messages.Message):
"""Response containing existing BuildTriggers.
Fields:
triggers: BuildTriggers for the project, sorted by create_time descending.
"""
triggers = _messages.MessageField('BuildTrigger', 1, repeated=True)
class ListBuildsResponse(_messages.Message):
"""Response including listed builds.
Fields:
builds: Builds will be sorted by create_time, descending.
nextPageToken: Token to receive the next page of results.
"""
builds = _messages.MessageField('Build', 1, repeated=True)
nextPageToken = _messages.StringField(2)
class ListOperationsResponse(_messages.Message):
"""The response message for Operations.ListOperations.
Fields:
nextPageToken: The standard List next-page token.
operations: A list of operations that matches the specified filter in the
request.
"""
nextPageToken = _messages.StringField(1)
operations = _messages.MessageField('Operation', 2, repeated=True)
class Operation(_messages.Message):
"""This resource represents a long-running operation that is the result of a
network API call.
Messages:
MetadataValue: Service-specific metadata associated with the operation.
It typically contains progress information and common metadata such as
create time. Some services might not provide such metadata. Any method
that returns a long-running operation should document the metadata type,
if any.
ResponseValue: The normal response of the operation in case of success.
If the original method returns no data on success, such as `Delete`, the
response is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
Fields:
done: If the value is `false`, it means the operation is still in
progress. If true, the operation is completed, and either `error` or
`response` is available.
error: The error result of the operation in case of failure or
cancellation.
metadata: Service-specific metadata associated with the operation. It
typically contains progress information and common metadata such as
create time. Some services might not provide such metadata. Any method
that returns a long-running operation should document the metadata type,
if any.
name: The server-assigned name, which is only unique within the same
service that originally returns it. If you use the default HTTP mapping,
the `name` should have the format of `operations/some/unique/name`.
response: The normal response of the operation in case of success. If the
original method returns no data on success, such as `Delete`, the
response is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class MetadataValue(_messages.Message):
"""Service-specific metadata associated with the operation. It typically
contains progress information and common metadata such as create time.
Some services might not provide such metadata. Any method that returns a
long-running operation should document the metadata type, if any.
Messages:
AdditionalProperty: An additional property for a MetadataValue object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a MetadataValue object.
Fields:
key: Name of the additional property.
value: An extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
@encoding.MapUnrecognizedFields('additionalProperties')
class ResponseValue(_messages.Message):
"""The normal response of the operation in case of success. If the
original method returns no data on success, such as `Delete`, the response
is `google.protobuf.Empty`. If the original method is standard
`Get`/`Create`/`Update`, the response should be the resource. For other
methods, the response should have the type `XxxResponse`, where `Xxx` is
the original method name. For example, if the original method name is
`TakeSnapshot()`, the inferred response type is `TakeSnapshotResponse`.
Messages:
AdditionalProperty: An additional property for a ResponseValue object.
Fields:
additionalProperties: Properties of the object. Contains field @type
with type URL.
"""
class AdditionalProperty(_messages.Message):
"""An additional property for a ResponseValue object.
Fields:
key: Name of the additional property.
value: An extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
done = _messages.BooleanField(1)
error = _messages.MessageField('Status', 2)
metadata = _messages.MessageField('MetadataValue', 3)
name = _messages.StringField(4)
response = _messages.MessageField('ResponseValue', 5)
class RepoSource(_messages.Message):
"""RepoSource describes the location of the source in a Google Cloud Source
Repository.
Fields:
branchName: Name of the branch to build.
commitSha: Explicit commit SHA to build.
projectId: ID of the project that owns the repo. If omitted, the project
ID requesting the build is assumed.
repoName: Name of the repo. If omitted, the name "default" is assumed.
tagName: Name of the tag to build.
"""
branchName = _messages.StringField(1)
commitSha = _messages.StringField(2)
projectId = _messages.StringField(3)
repoName = _messages.StringField(4)
tagName = _messages.StringField(5)
class Results(_messages.Message):
"""Results describes the artifacts created by the build pipeline.
Fields:
buildStepImages: List of build step digests, in order corresponding to
build step indices.
images: Images that were built as a part of the build.
"""
buildStepImages = _messages.StringField(1, repeated=True)
images = _messages.MessageField('BuiltImage', 2, repeated=True)
class Source(_messages.Message):
"""Source | |
# src/reducer/bohm_test.py
import itertools
import hypothesis
import hypothesis.strategies as s
import pytest
from pomagma.reducer import bohm
from pomagma.reducer.bohm import (CB, CI, KI, B, C, I, K, S, Y, app, false,
is_linear, is_normal, join, polish_simplify,
print_tiny, sexpr_simplify, true,
try_decide_equal, try_decide_less,
try_decide_less_weak)
from pomagma.reducer.syntax import (ABS, APP, BOT, CODE, EVAL, IVAR, JOIN,
NVAR, QAPP, QEQUAL, QLESS, QQUOTE, QUOTE,
TOP, Term, polish_print, quoted_vars,
sexpr_parse, sexpr_print)
from pomagma.reducer.testing import iter_equations
from pomagma.util.testing import for_each, xfail_if_not_implemented
pretty = sexpr_print
i0 = IVAR(0)
i1 = IVAR(1)
i2 = IVAR(2)
i3 = IVAR(3)
x = NVAR('x')
y = NVAR('y')
z = NVAR('z')
delta = ABS(APP(i0, i0))
ACTIVE_ATOMS = [Y, EVAL, QAPP, QQUOTE, QLESS, QEQUAL]
s_atoms = s.one_of(
s.sampled_from([TOP, BOT]),
s.just(i0),
s.just(i1),
s.just(i2),
s.just(i3),
s.sampled_from([x, y, z]),
s.sampled_from(ACTIVE_ATOMS),
)
def s_terms_extend(terms):
return s.one_of(
s.builds(app, terms, terms),
s.builds(
bohm.abstract,
terms.filter(lambda c: i0 not in quoted_vars(c)),
),
s.builds(join, terms, terms),
s.builds(QUOTE, terms),
)
s_terms = s.recursive(s_atoms, s_terms_extend, max_leaves=32)
s_quoted = s.builds(QUOTE, s_terms)
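# The strategies above generate random terms (and quoted terms) for the
# property-based tests below; an illustrative way to inspect them
# interactively (not part of the original test module):
#
#   s_terms.example()   # e.g. APP(ABS(IVAR(0)), NVAR('x'))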
def test_constants():
assert app(I, x) is x
assert app(app(K, x), y) is x
assert app(app(app(B, x), y), z) is APP(x, APP(y, z))
assert app(app(app(C, x), y), z) is APP(APP(x, z), y)
assert app(app(app(S, x), y), z) is APP(APP(x, z), APP(y, z))
assert KI is app(K, I)
assert CI is app(C, I)
assert CB is app(C, B)
# ----------------------------------------------------------------------------
# Functional programming
INCREMENT_RANK_EXAMPLES = [
(TOP, TOP),
(BOT, BOT),
(x, x),
(y, y),
(i0, i1),
(i1, i2),
(i2, i3),
(ABS(i0), ABS(i0)),
(ABS(i1), ABS(i2)),
(ABS(i2), ABS(i3)),
(ABS(ABS(i0)), ABS(ABS(i0))),
(ABS(ABS(i1)), ABS(ABS(i1))),
(ABS(ABS(i2)), ABS(ABS(i3))),
(APP(i0, i1), APP(i1, i2)),
(ABS(APP(i0, i0)), ABS(APP(i0, i0))),
(ABS(APP(i1, i2)), ABS(APP(i2, i3))),
(JOIN(i0, i1), JOIN(i1, i2)),
(QUOTE(i0), QUOTE(i1)),
(EVAL, EVAL),
(QAPP, QAPP),
(QQUOTE, QQUOTE),
(QLESS, QLESS),
(QEQUAL, QEQUAL),
]
@for_each(INCREMENT_RANK_EXAMPLES)
def test_increment_rank(term, expected):
assert bohm.increment_rank(term) is expected
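# Illustrative note on the examples above: increment_rank shifts free de
# Bruijn indices up by one while leaving bound indices alone. In ABS(i1) the
# index 1 points outside the ABS (it is free), so it becomes ABS(i2); in
# ABS(i0) the index 0 is bound by the ABS and is unchanged.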
DECREMENT_RANK_EXAMPLES = [
(TOP, TOP),
(BOT, BOT),
(x, x),
(y, y),
(i1, i0),
(i2, i1),
(i3, i2),
(APP(i1, i2), APP(i0, i1)),
(ABS(APP(i0, i0)), ABS(APP(i0, i0))),
(ABS(APP(i2, i3)), ABS(APP(i1, i2))),
(JOIN(i1, i2), JOIN(i0, i1)),
(QUOTE(i1), QUOTE(i0)),
(EVAL, EVAL),
(QAPP, QAPP),
(QQUOTE, QQUOTE),
(QLESS, QLESS),
(QEQUAL, QEQUAL),
]
@for_each(DECREMENT_RANK_EXAMPLES)
def test_decrement_rank(term, expected):
assert bohm.decrement_rank(term) is expected
@hypothesis.given(s_terms)
def test_decrement_increment_rank(term):
assert bohm.decrement_rank(bohm.increment_rank(term)) is term
IS_LINEAR_EXAMPLES = [
(TOP, True),
(BOT, True),
(x, True),
(y, True),
(i0, True),
(i1, True),
(APP(i0, i0), True),
(APP(i0, i1), True),
(APP(i1, i0), True),
(APP(i1, i1), True),
(ABS(APP(i0, i0)), False),
(ABS(APP(i0, i1)), True),
(ABS(APP(i1, i0)), True),
(ABS(APP(i1, i1)), True),
(JOIN(ABS(i0), ABS(APP(i0, x))), True),
(JOIN(ABS(i0), ABS(APP(i0, i0))), False),
(JOIN(ABS(i0), ABS(ABS(i1))), True),
(JOIN(ABS(ABS(i0)), ABS(ABS(i1))), True),
(QUOTE(ABS(APP(i0, i0))), True),
(Y, False),
(EVAL, False),
(QAPP, True),
(QQUOTE, True),
(QLESS, True),
(QEQUAL, True),
]
@for_each(IS_LINEAR_EXAMPLES)
def test_is_linear(term, expected):
assert is_linear(term) is expected
PERMUTE_RANK_EXAMPLES = [
('(0 1 2 3)', 0, '(0 1 2 3)'),
('(0 1 2 3)', 1, '(1 0 2 3)'),
('(0 1 2 3)', 2, '(1 2 0 3)'),
('(0 1 2 3)', 3, '(1 2 3 0)'),
('(0 1 2 3)', 4, '(1 2 3 4)'),
('(0 1 2 3)', 5, '(1 2 3 4)'),
('(0 1 x y EVAL)', 1, '(1 0 x y EVAL)'),
    ('(ABS (0 1 2 3))', 0, '(ABS (0 1 2 3))'),
    ('(ABS (0 1 2 3))', 1, '(ABS (0 2 1 3))'),
    ('(ABS (0 1 2 3))', 2, '(ABS (0 2 3 1))'),
    ('(ABS (0 1 2 3))', 3, '(ABS (0 2 3 4))'),
    ('(ABS (ABS (0 1 2 3)))', 0, '(ABS (ABS (0 1 2 3)))'),
    ('(ABS (ABS (0 1 2 3)))', 1, '(ABS (ABS (0 1 3 2)))'),
    ('(ABS (ABS (0 1 2 3)))', 2, '(ABS (ABS (0 1 3 4)))'),
('(JOIN (x 0 1 2) (y 0 1 2))', 0, '(JOIN (x 0 1 2) (y 0 1 2))'),
('(JOIN (x 0 1 2) (y 0 1 2))', 1, '(JOIN (x 1 0 2) (y 1 0 2))'),
('(JOIN (x 0 1 2) (y 0 1 2))', 2, '(JOIN (x 1 2 0) (y 1 2 0))'),
('(QUOTE (0 1 2))', 0, '(QUOTE (0 1 2))'),
('(QUOTE (0 1 2))', 1, '(QUOTE (1 0 2))'),
('(QUOTE (0 1 2))', 2, '(QUOTE (1 2 0))'),
]
@for_each(PERMUTE_RANK_EXAMPLES)
def test_permute_rank(term, rank, expected):
actual = sexpr_print(bohm.permute_rank(sexpr_parse(term), rank))
assert actual == expected
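# substitute(body, value, rank, ...) replaces the free variable IVAR(rank)
# in body with value and decrements the free indices above it; here rank=0.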
SUBSTITUTE_EXAMPLES = [
(TOP, BOT, TOP),
(BOT, TOP, BOT),
(x, TOP, x),
(i0, x, x),
(i1, x, i0),
(i2, x, i1),
(ABS(i0), x, ABS(i0)),
(ABS(i1), x, ABS(x)),
(ABS(i2), x, ABS(i1)),
(ABS(i1), i0, ABS(i1)),
(ABS(i1), i1, ABS(i2)),
(ABS(APP(i0, i0)), x, ABS(APP(i0, i0))),
(ABS(APP(i0, i1)), x, ABS(APP(i0, x))),
(ABS(APP(i0, i2)), x, ABS(APP(i0, i1))),
(ABS(APP(i0, i1)), i0, ABS(APP(i0, i1))),
(ABS(APP(i0, i1)), i1, ABS(APP(i0, i2))),
(ABS(ABS(APP(i2, APP(i1, i0)))), x, ABS(ABS(APP(x, APP(i1, i0))))),
(ABS(ABS(APP(i2, APP(i1, i0)))), i0, ABS(ABS(APP(i2, APP(i1, i0))))),
(ABS(ABS(APP(i2, APP(i1, i0)))), i1, ABS(ABS(APP(i3, APP(i1, i0))))),
(APP(i0, i1), x, APP(x, i0)),
(JOIN(i0, i1), x, JOIN(i0, x)),
(QUOTE(i0), x, QUOTE(x)),
(QUOTE(i1), x, QUOTE(i0)),
(EVAL, x, EVAL),
(QAPP, x, QAPP),
(QQUOTE, x, QQUOTE),
(QLESS, x, QLESS),
(QEQUAL, x, QEQUAL),
]
@for_each(SUBSTITUTE_EXAMPLES)
def test_substitute(body, value, expected):
expected = pretty(expected)
actual = pretty(bohm.substitute(body, value, 0, False))
assert actual == expected
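# app() is a smart constructor: among other rules it beta-reduces redexes,
# propagates the TOP/BOT lattice extremes, distributes application over
# JOIN, and evaluates the quoting atoms (EVAL, QAPP, QQUOTE, QLESS, QEQUAL)
# when their arguments are quoted literals.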
APP_EXAMPLES = [
(TOP, TOP, TOP),
(TOP, BOT, TOP),
(TOP, x, TOP),
(TOP, i0, TOP),
(BOT, TOP, BOT),
(BOT, BOT, BOT),
(BOT, x, BOT),
(BOT, i0, BOT),
(x, TOP, APP(x, TOP)),
(x, BOT, APP(x, BOT)),
(x, x, APP(x, x)),
(x, i0, APP(x, i0)),
(i0, TOP, APP(i0, TOP)),
(i0, BOT, APP(i0, BOT)),
(i0, x, APP(i0, x)),
(i0, i0, APP(i0, i0)),
(ABS(i1), TOP, i0),
(ABS(i1), BOT, i0),
(ABS(i1), x, i0),
(ABS(i0), TOP, TOP),
(ABS(i0), BOT, BOT),
(ABS(i0), x, x),
(ABS(APP(i0, y)), TOP, TOP),
(ABS(APP(i0, y)), BOT, BOT),
(ABS(APP(i0, y)), x, APP(x, y)),
(ABS(APP(i0, i1)), x, APP(x, i0)),
(JOIN(x, y), z, JOIN(APP(x, z), APP(y, z))),
(JOIN(ABS(i0), x), TOP, TOP),
(JOIN(ABS(i0), x), BOT, APP(x, BOT)),
(Y, TOP, TOP),
(Y, BOT, BOT),
(Y, Y, BOT),
(Y, x, APP(Y, x)),
(Y, ABS(x), APP(Y, ABS(x))),
(QUOTE(TOP), x, APP(QUOTE(TOP), x)),
(EVAL, TOP, TOP),
(EVAL, BOT, BOT),
(EVAL, QUOTE(x), x),
(EVAL, QUOTE(i0), i0),
(EVAL, x, APP(EVAL, x)),
(QAPP, TOP, TOP),
(QAPP, BOT, APP(QAPP, BOT)),
(QAPP, QUOTE(x), APP(QAPP, QUOTE(x))),
(QAPP, x, APP(QAPP, x)),
(APP(QAPP, TOP), TOP, TOP),
(APP(QAPP, TOP), BOT, TOP),
(APP(QAPP, TOP), QUOTE(y), TOP),
(APP(QAPP, TOP), y, TOP),
(APP(QAPP, BOT), TOP, TOP),
(APP(QAPP, BOT), BOT, BOT),
(APP(QAPP, BOT), QUOTE(y), BOT),
(APP(QAPP, BOT), y, APP(APP(QAPP, BOT), y)),
(APP(QAPP, QUOTE(x)), TOP, TOP),
(APP(QAPP, QUOTE(x)), BOT, BOT),
(APP(QAPP, QUOTE(x)), QUOTE(y), QUOTE(APP(x, y))),
(APP(QAPP, QUOTE(x)), y, APP(APP(QAPP, QUOTE(x)), y)),
(APP(QAPP, x), TOP, TOP),
(APP(QAPP, x), BOT, APP(APP(QAPP, x), BOT)),
(APP(QAPP, x), QUOTE(y), APP(APP(QAPP, x), QUOTE(y))),
(APP(QAPP, x), y, APP(APP(QAPP, x), y)),
(QQUOTE, TOP, TOP),
(QQUOTE, BOT, BOT),
(QQUOTE, QUOTE(x), QUOTE(QUOTE(x))),
(QQUOTE, x, APP(QQUOTE, x)),
(APP(QLESS, TOP), TOP, TOP),
(APP(QLESS, TOP), BOT, TOP),
(APP(QLESS, TOP), QUOTE(y), TOP),
(APP(QLESS, TOP), y, TOP),
(APP(QLESS, BOT), TOP, TOP),
(APP(QLESS, BOT), BOT, BOT),
(APP(QLESS, BOT), QUOTE(y), BOT),
(APP(QLESS, BOT), i1, APP(APP(QLESS, BOT), i1)),
(APP(QLESS, QUOTE(i0)), TOP, TOP),
(APP(QLESS, QUOTE(i0)), BOT, BOT),
(APP(QLESS, QUOTE(i0)), QUOTE(i1), false),
(APP(QLESS, QUOTE(i0)), QUOTE(i0), true),
(APP(QLESS, QUOTE(i0)), i1, APP(APP(QLESS, QUOTE(i0)), i1)),
(APP(QLESS, i0), TOP, TOP),
(APP(QLESS, i0), BOT, APP(APP(QLESS, i0), BOT)),
(APP(QLESS, i0), QUOTE(i1), APP(APP(QLESS, i0), QUOTE(i1))),
(APP(QLESS, i0), i1, APP(APP(QLESS, i0), i1)),
(APP(QEQUAL, TOP), TOP, TOP),
(APP(QEQUAL, TOP), BOT, TOP),
(APP(QEQUAL, TOP), QUOTE(i1), TOP),
(APP(QEQUAL, TOP), i1, TOP),
(APP(QEQUAL, BOT), TOP, TOP),
(APP(QEQUAL, BOT), BOT, BOT),
(APP(QEQUAL, BOT), QUOTE(i1), BOT),
(APP(QEQUAL, BOT), i1, APP(APP(QEQUAL, BOT), i1)),
(APP(QEQUAL, QUOTE(i0)), TOP, TOP),
(APP(QEQUAL, QUOTE(i0)), BOT, BOT),
(APP(QEQUAL, QUOTE(i0)), QUOTE(i1), false),
(APP(QEQUAL, QUOTE(i0)), QUOTE(i0), true),
(APP(QEQUAL, QUOTE(i0)), i1, APP(APP(QEQUAL, QUOTE(i0)), i1)),
(APP(QEQUAL, i0), TOP, TOP),
(APP(QEQUAL, i0), BOT, APP(APP(QEQUAL, i0), BOT)),
(APP(QEQUAL, i0), QUOTE(i1), APP(APP(QEQUAL, i0), QUOTE(i1))),
(APP(QEQUAL, i0), i1, APP(APP(QEQUAL, i0), i1)),
(C, B, ABS(ABS(ABS(APP(i1, APP(i2, i0)))))),
(S, I, ABS(ABS(APP(i0, APP(i1, i0))))),
(app(S, I), I, ABS(APP(i0, i0))),
]
@for_each(APP_EXAMPLES)
def test_app(fun, arg, expected):
with xfail_if_not_implemented():
assert pretty(app(fun, arg)) == pretty(expected)
ABSTRACT_EXAMPLES = [
(TOP, TOP),
(BOT, BOT),
(x, ABS(x)),
(i0, ABS(i0)),
(i1, ABS(i1)),
(ABS(i0), ABS(ABS(i0))),
(APP(i0, x), ABS(APP(i0, x))),
(APP(i0, i0), ABS(APP(i0, i0))),
(APP(x, i0), x),
(JOIN(i0, x), JOIN(ABS(i0), ABS(x))),
(QUOTE(i1), ABS(QUOTE(i1))),
(APP(QUOTE(i1), i0), QUOTE(i0)),
(EVAL, ABS(EVAL)),
(QAPP, ABS(QAPP)),
(QQUOTE, ABS(QQUOTE)),
]
@for_each(ABSTRACT_EXAMPLES)
def test_abstract(term, expected):
assert bohm.abstract(term) is expected
@hypothesis.given(s_terms)
def test_abstract_eta(term):
assert bohm.abstract(app(bohm.increment_rank(term), i0)) is term
@hypothesis.given(s_terms)
@hypothesis.example(join(TOP, APP(QUOTE(i1), i0)))
def test_app_abstract(term):
hypothesis.assume(i0 not in quoted_vars(term))
assert app(bohm.increment_rank(bohm.abstract(term)), i0) is term
QABSTRACT_EXAMPLES = [
(i0, EVAL),
(i1, ABS(i1)),
]
@for_each(QABSTRACT_EXAMPLES)
def test_qabstract(term, expected):
assert bohm.qabstract(term) is expected
@for_each([
(x, x, ABS(i0)),
(x, y, ABS(y)),
(x, i0, ABS(i1)),
(x, ABS(APP(i0, x)), ABS(ABS(APP(i0, i1)))),
])
def test_nominal_abstract(var, body, expected):
    assert bohm.nominal_abstract(var, body) is expected
<reponame>sergey-lebedev/concor<filename>bots/greedy_bot.py
from algorithms import *
import copy
DEBUG = False
inf = float("infinity")
def branch_generator(game_state, adjacency_list, owner, alpha, beta, is_final):
pruning = False
# branch init
branch = {}
branch['nodes'] = []
# data gathering from game state
player = game_state['player']
player_list = game_state['player_list']
wall_list = game_state['wall_list']
# player detection
current_player = player_list.index(player)
#print 'player_number: ', current_player
next_player = (current_player + 1) % len(player_list)
# old
loc = player['location']
target_loc = player['target_loc']
#opponent_list
opponent_list = [item for item in player_list if item != player]
    #neighbors
available_positions = available_positions_generator(loc, wall_list, player_list, adjacency_list)
neighbors = [location for location in available_positions[loc]]
#possibility matrix
p = w2p(wall_list)
#actions
action_list = []
#movement
distances = []
for opponent in opponent_list:
#print opponent
opponent_available_positions =\
available_positions_generator(opponent['location'],
wall_list,
player_list,
adjacency_list)
step = bfs_light(opponent['location'],
opponent_available_positions,
opponent['target_loc'])
#print step
distances.append(step)
distance = min(distances)
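    # Heuristic cost of a move: the closest opponent's remaining distance to
    # its target minus our own remaining distance (bigger is better for us).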
#trace = list(subtrace)
#print distance
for neighbor in neighbors:
# leafs don't need game state copy
if is_final:
current_game_state = {}
else:
current_game_state = copy.deepcopy(game_state)
step = bfs_light(neighbor, available_positions, target_loc)
#print step
        if (step is not None) and (distance is not None):
            value = distance - step
        else:
            value = 0
#print 'cost: ', value
#print 'estimate: ', estimate
action = {'action_type': 'movement', 'location': neighbor, 'cost': value}
##print action
(x, y) = neighbor
if not is_final:
current_game_state['player_list'][current_player]['location'] = neighbor
current_game_state['player'] = player_list[next_player]
branch['nodes'].append({'action': action, 'game_state': current_game_state})
if is_final:
pruning = alpha_beta_pruning(alpha, beta, value, owner)
if pruning:
break
# cost evaluation
# win move
intersection = set(neighbors).intersection(set(target_loc))
if intersection:
# leafs don't need game state copy
if is_final:
current_game_state = {}
else:
current_game_state = copy.deepcopy(game_state)
location = list(intersection)[0]
value = inf
action = {'action_type': 'movement', 'location': location, 'cost': value}
action_list.append(action)
if not is_final:
current_game_state['player_list'][current_player]['location'] = location
current_game_state['player'] = player_list[next_player]
branch['nodes'].append({'action': action, 'game_state': current_game_state})
# node pruning
if is_final:
pruning = alpha_beta_pruning(alpha, beta, value, owner)
# building
if (player['amount_of_walls'] > 0) and not pruning:
for location in p:
if p[location] and not pruning:
for wall_type in p[location]:
# leafs don't need game state copy
if is_final:
current_game_state = {}
else:
current_game_state = copy.deepcopy(game_state)
projected_wall_list = list(wall_list)
wall = {'type': wall_type,
'location': location,
'player_id': player['id']
}
projected_wall_list.append(wall)
distances = []
for opponent in opponent_list:
projected_available_positions =\
available_positions_generator(opponent['location'], projected_wall_list,
player_list,
adjacency_list)
step = bfs_light(opponent['location'],
projected_available_positions,
opponent['target_loc'])
distances.append(step)
distance = min(distances)
projected_available_positions =\
available_positions_generator(loc,
projected_wall_list,
player_list,
adjacency_list)
step = bfs_light(loc,
projected_available_positions,
target_loc)
                    if (step is not None) and (distance is not None):
                        value = distance - step
#print 'cost: ', value
#print 'estimate: ', estimate
action = {'action_type': 'building', 'wall': wall, 'cost': value}
#print action
if not is_final:
current_game_state['wall_list'].append(wall)
current_game_state['player_list'][current_player]['amount_of_walls'] -= 1
current_game_state['player'] = player_list[next_player]
branch['nodes'].append({'action': action, 'game_state': current_game_state})
action_list.append(action)
# node pruning
if is_final:
pruning = alpha_beta_pruning(alpha, beta, value, owner)
if pruning:
break
return branch
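# turn() builds a bounded-depth game tree rooted at the current state and
# explores it with an explicit DFS stack, backing values up minimax-style
# and pruning with the alpha/beta bounds stored on each node.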
def turn(player, player_list, wall_list, available_positions, adjacency_list):
# current game state
game_state = {}
game_state['player'] = player
game_state['wall_list'] = wall_list
game_state['player_list'] = player_list
# bot stupefying
opponents_walls_counter = 0
for opponent in player_list:
if opponent != player:
opponents_walls_counter += opponent['amount_of_walls']
if player['amount_of_walls'] != 0:
if opponents_walls_counter != 0:
depth = 2
else:
depth = 2
else:
if opponents_walls_counter != 0:
depth = 0
else:
depth = 6
# game tree
index = 0
game_tree = {}
    root = {index: {'parent': None, 'child': [], 'game_state': game_state,
                    'expanded': False, 'initial': -inf, 'final': -inf,
                    'alpha': None, 'beta': None, 'owner': 'max',
                    'action': None, 'is_node': False}}
game_tree.update(root)
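    # Each tree node tracks its parent/children, the game state it represents,
    # preliminary ('initial') and backed-up ('final') values, alpha/beta
    # bounds, and whether it belongs to the maximizing or minimizing player.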
# game tree dfs
level = 0
stack = [index]
    while stack:
# get ancestor
parent = stack[-1]
#print 'parent: '
#print parent
if game_tree[parent]['expanded']:
stack.pop(-1)
level -= 1
else:
level += 1
#print 'stack:'
#print stack
current_game_state = game_tree[parent]['game_state']
#print current_game_state
# owner detector
if (level % len(player_list) == 0):
owner = 'max'
else:
owner = 'min'
# final branches detection
if (level < depth):
is_final = False
else:
is_final = True
if not game_tree[parent]['expanded']:
            # branch generator
if owner == 'max':
if game_tree[parent]['owner'] == 'min':
initial = final = -inf
elif owner == 'min':
if game_tree[parent]['owner'] == 'max':
initial = final = inf
if DEBUG:
print owner
print game_tree[parent]['owner']
# in depth params transition
if parent != 0:
grandparent = game_tree[parent]['parent']
alpha = game_tree[grandparent]['alpha']
beta = game_tree[grandparent]['beta']
game_tree[parent]['alpha'] = alpha
game_tree[parent]['beta'] = beta
if DEBUG:
print "in depth params transition"
print "from", grandparent, "to", parent
print game_tree[grandparent]['owner']
print game_tree[parent]['owner']
print "alpha:", alpha, "beta:", beta
else:
if owner == 'max':
if game_tree[parent]['owner'] == 'min':
alpha = None
beta = game_tree[parent]['initial']
elif owner == 'min':
if game_tree[parent]['owner'] == 'max':
alpha = game_tree[parent]['initial']
beta = None
branch = branch_generator(current_game_state, adjacency_list, game_tree[parent]['owner'], game_tree[parent]['alpha'], game_tree[parent]['beta'], is_final)
#print branch['nodes']
child_list = []
weighted_subbranches = []
for state in branch['nodes']:
index += 1
action = state['action']
value = action['cost']
#print action
node_game_state = state['game_state']
                node = {index: {'parent': parent, 'child': [],
                                'game_state': node_game_state, 'action': action,
                                'expanded': False, 'initial': initial,
                                'final': final, 'alpha': alpha, 'beta': beta,
                                'owner': owner, 'is_node': False}}
game_tree.update(node)
#print node
child_list.append(index)
if (level < depth) and (abs(value) != inf):
weighted_subbranches.append((index, value))
else:
game_tree[index]['is_node'] = True
if DEBUG:
print 'node:', index, ' termination'
if owner == 'max':
                        initial = final = -value
#print initial
game_tree[index]['initial'] = initial
game_tree[index]['final'] = final
if game_tree[parent]['owner'] == 'min':
if game_tree[parent]['initial'] > final:
game_tree[parent]['initial'] = final
if (final == -inf):
game_tree[parent]['alpha'] = -inf
elif owner == 'min':
initial = final = value
#print initial
game_tree[index]['initial'] = initial
game_tree[index]['final'] = final
if game_tree[parent]['owner'] == 'max':
if game_tree[parent]['initial'] < final:
game_tree[parent]['initial'] = final
if (final == inf):
game_tree[parent]['beta'] = final
game_tree[parent]['child'].extend(child_list)
game_tree[parent]['expanded'] = True
if DEBUG:
print 'subbranches: '
print weighted_subbranches
# ordering subbranches by preliminary evaluation
weighted_subbranches = sorted(weighted_subbranches, key=lambda subbranch: subbranch[1], reverse=True)
if DEBUG: print weighted_subbranches
subbranches = [subbranch for (subbranch, weight) in weighted_subbranches]
# stack forming
subbranches.reverse()
stack.extend(subbranches)
else:
initial = game_tree[parent]['initial']
final = initial
#print 'final: ', final
game_tree[parent]['final'] = final
grandparent = game_tree[parent]['parent']
if parent != 0:
if DEBUG:
print game_tree[parent]['owner']
print game_tree[grandparent]['owner']
game_tree[parent]['action']['cost'] = final
grandparent = game_tree[parent]['parent']
initial = game_tree[grandparent]['initial']
# from depth params transition
if owner == 'max':
#print game_tree[parent]['final']
if game_tree[grandparent]['owner'] == 'min':
if (initial >= final):
game_tree[grandparent]['initial'] = final
if final != inf:
game_tree[grandparent]['alpha'] = final
else:
game_tree[grandparent]['alpha'] = None
elif owner == 'min':
#print game_tree[parent]['final']
if game_tree[grandparent]['owner'] == 'max':
if (initial < final):
game_tree[grandparent]['initial'] = final
if final != -inf:
game_tree[grandparent]['beta'] = final
else:
game_tree[grandparent]['beta'] = None
# branch pruning
if parent != 0:
grandparent = game_tree[parent]['parent']
value = game_tree[parent]['initial']
alpha = game_tree[parent]['alpha']
beta = game_tree[parent]['beta']
if DEBUG:
print 'owner:', owner
print 'grandparent owner:', game_tree[grandparent]['owner']
print 'parent owner:', game_tree[parent]['owner']
print 'value:', value
print 'alpha:', alpha
print 'beta:', beta
if game_tree[grandparent]['owner'] == 'max':
#print game_tree[parent]['final']
if game_tree[parent]['owner'] == 'min':
# branch pruning
                    if alpha is not None:
if (value > alpha):
if DEBUG:
print "alpha pruning"
print parent
print game_tree[parent]['child']
print game_tree[grandparent]['child']
print stack
for child in game_tree[grandparent]['child']:
if DEBUG:
print child, game_tree[child]['expanded']
if not game_tree[child]['expanded'] and child in stack:
if DEBUG:
print "pruning node", child
stack.remove(child)
game_tree[grandparent]['child'].remove(child)
if game_tree[grandparent]['owner'] == 'min':
#print game_tree[parent]['final']
if game_tree[parent]['owner'] == 'max':
                    if beta is not None:
if (value < beta) or (beta == inf):
if DEBUG:
print "beta pruning"
print parent
print game_tree[parent]['child']
print game_tree[grandparent]['child']
print stack
for child in game_tree[grandparent]['child']:
if DEBUG:
print child, game_tree[child]['expanded']
if not game_tree[child]['expanded'] and child in stack:
if DEBUG:
print "pruning node", child
stack.remove(child)
game_tree[grandparent]['child'].remove(child)
if DEBUG:
print "debug output. game tree structure."
sequence = []
sequence.append(0)
        while sequence:
node = sequence.pop(0)
parent = game_tree[node]['parent']
child_list = game_tree[node]['child']
initial = game_tree[node]['initial']
final = game_tree[node]['final']
alpha = game_tree[node]['alpha']
beta = game_tree[node]['beta']
owner = game_tree[node]['owner']
is_node = game_tree[node]['is_node']
expanded = game_tree[node]['expanded']
print "node: %s; expanded: %s, is_node: %s; parent: %s; child: %s; initial: %s; final: %s; alpha: %s; beta: %s; owner: %s;"\
%(node, expanded, is_node, parent, child_list, initial, final, alpha, beta, owner)
#print 'action: ', game_tree[node]['action']
sequence.extend(child_list)
# action select
level = 0
action_list = []
#print 'actions: '
#print 'level: ', level
#print game_tree[level]['child']
for child in game_tree[level]['child']:
action_list.append(game_tree[child]['action'])
    #print
    def GetName( self, service_key ):
        with self._lock:
            service = self._GetService( service_key )
return service.GetName()
def GetService( self, service_key ):
with self._lock:
return self._GetService( service_key )
def GetServiceType( self, service_key ):
with self._lock:
return self._GetService( service_key ).GetServiceType()
def GetServiceKeyFromName( self, allowed_types, service_name ):
with self._lock:
for service in self._services_sorted:
if service.GetServiceType() in allowed_types and service.GetName() == service_name:
return service.GetServiceKey()
raise HydrusExceptions.DataMissing()
def GetServiceKeys( self, desired_types = HC.ALL_SERVICES ):
with self._lock:
filtered_service_keys = [ service_key for ( service_key, service ) in list(self._keys_to_services.items()) if service.GetServiceType() in desired_types ]
return filtered_service_keys
def GetServices( self, desired_types = HC.ALL_SERVICES, randomised = True ):
with self._lock:
def func( service ):
return service.GetServiceType() in desired_types
services = list(filter( func, self._services_sorted ))
if randomised:
random.shuffle( services )
return services
def RefreshServices( self ):
with self._lock:
services = self._controller.Read( 'services' )
self._SetServices( services )
def ServiceExists( self, service_key ):
with self._lock:
return service_key in self._keys_to_services
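# Maps shortcut-set names to their serialised shortcut objects and resolves
# a keyboard/mouse shortcut to an application command.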
class ShortcutsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._shortcuts = {}
self.RefreshShortcuts()
self._controller.sub( self, 'RefreshShortcuts', 'new_shortcuts' )
def GetCommand( self, shortcuts_names, shortcut ):
for name in shortcuts_names:
if name in self._shortcuts:
command = self._shortcuts[ name ].GetCommand( shortcut )
if command is not None:
if HG.gui_report_mode:
HydrusData.ShowText( 'command matched: ' + repr( command ) )
return command
return None
def RefreshShortcuts( self ):
self._shortcuts = {}
all_shortcuts = HG.client_controller.Read( 'serialisable_named', HydrusSerialisable.SERIALISABLE_TYPE_SHORTCUTS )
for shortcuts in all_shortcuts:
self._shortcuts[ shortcuts.GetName() ] = shortcuts
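# Caches per-service tag censorship rules (a blacklist flag plus a set of
# censorship patterns) and filters tags, predicates, and tag pairs with them.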
class TagCensorshipManager( object ):
def __init__( self, controller ):
self._controller = controller
self.RefreshData()
self._controller.sub( self, 'RefreshData', 'notify_new_tag_censorship' )
def _CensorshipMatches( self, tag, blacklist, censorships ):
if blacklist:
return not HydrusTags.CensorshipMatch( tag, censorships )
else:
return HydrusTags.CensorshipMatch( tag, censorships )
def GetInfo( self, service_key ):
        if service_key in self._service_keys_to_info:
            return self._service_keys_to_info[ service_key ]
        else:
            return ( True, set() )
def RefreshData( self ):
rows = self._controller.Read( 'tag_censorship' )
self._service_keys_to_info = { service_key : ( blacklist, censorships ) for ( service_key, blacklist, censorships ) in rows }
def FilterPredicates( self, service_key, predicates ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
predicates = [ predicate for predicate in predicates if predicate.GetType() != HC.PREDICATE_TYPE_TAG or self._CensorshipMatches( predicate.GetValue(), blacklist, censorships ) ]
return predicates
def FilterStatusesToPairs( self, service_key, statuses_to_pairs ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
new_statuses_to_pairs = HydrusData.default_dict_set()
for ( status, pairs ) in list(statuses_to_pairs.items()):
new_statuses_to_pairs[ status ] = { ( one, two ) for ( one, two ) in pairs if self._CensorshipMatches( one, blacklist, censorships ) and self._CensorshipMatches( two, blacklist, censorships ) }
statuses_to_pairs = new_statuses_to_pairs
return statuses_to_pairs
def FilterServiceKeysToStatusesToTags( self, service_keys_to_statuses_to_tags ):
if CC.COMBINED_TAG_SERVICE_KEY in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ CC.COMBINED_TAG_SERVICE_KEY ]
service_keys = list(service_keys_to_statuses_to_tags.keys())
for service_key in service_keys:
statuses_to_tags = service_keys_to_statuses_to_tags[ service_key ]
statuses = list(statuses_to_tags.keys())
for status in statuses:
tags = statuses_to_tags[ status ]
statuses_to_tags[ status ] = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
for ( service_key, ( blacklist, censorships ) ) in list(self._service_keys_to_info.items()):
if service_key == CC.COMBINED_TAG_SERVICE_KEY:
continue
if service_key in service_keys_to_statuses_to_tags:
statuses_to_tags = service_keys_to_statuses_to_tags[ service_key ]
statuses = list(statuses_to_tags.keys())
for status in statuses:
tags = statuses_to_tags[ status ]
statuses_to_tags[ status ] = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
return service_keys_to_statuses_to_tags
def FilterTags( self, service_key, tags ):
for service_key_lookup in ( CC.COMBINED_TAG_SERVICE_KEY, service_key ):
if service_key_lookup in self._service_keys_to_info:
( blacklist, censorships ) = self._service_keys_to_info[ service_key_lookup ]
tags = { tag for tag in tags if self._CensorshipMatches( tag, blacklist, censorships ) }
return tags
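# Caches the child->parents mapping for tag parent relationships, collapsing
# siblings first, and expands tags and search predicates with their parents.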
class TagParentsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._dirty = False
self._refresh_job = None
self._service_keys_to_children_to_parents = collections.defaultdict( HydrusData.default_dict_list )
self._RefreshParents()
self._lock = threading.Lock()
self._controller.sub( self, 'NotifyNewParents', 'notify_new_parents' )
def _RefreshParents( self ):
service_keys_to_statuses_to_pairs = self._controller.Read( 'tag_parents' )
# first collapse siblings
sibling_manager = self._controller.tag_siblings_manager
collapsed_service_keys_to_statuses_to_pairs = collections.defaultdict( HydrusData.default_dict_set )
for ( service_key, statuses_to_pairs ) in list(service_keys_to_statuses_to_pairs.items()):
if service_key == CC.COMBINED_TAG_SERVICE_KEY: continue
for ( status, pairs ) in list(statuses_to_pairs.items()):
pairs = sibling_manager.CollapsePairs( service_key, pairs )
collapsed_service_keys_to_statuses_to_pairs[ service_key ][ status ] = pairs
# now collapse current and pending
service_keys_to_pairs_flat = HydrusData.default_dict_set()
for ( service_key, statuses_to_pairs ) in list(collapsed_service_keys_to_statuses_to_pairs.items()):
pairs_flat = statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
service_keys_to_pairs_flat[ service_key ] = pairs_flat
# now create the combined tag service
combined_pairs_flat = set()
for pairs_flat in list(service_keys_to_pairs_flat.values()):
combined_pairs_flat.update( pairs_flat )
service_keys_to_pairs_flat[ CC.COMBINED_TAG_SERVICE_KEY ] = combined_pairs_flat
#
service_keys_to_simple_children_to_parents = BuildServiceKeysToSimpleChildrenToParents( service_keys_to_pairs_flat )
self._service_keys_to_children_to_parents = BuildServiceKeysToChildrenToParents( service_keys_to_simple_children_to_parents )
def ExpandPredicates( self, service_key, predicates ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
results = []
with self._lock:
for predicate in predicates:
results.append( predicate )
if predicate.GetType() == HC.PREDICATE_TYPE_TAG:
tag = predicate.GetValue()
parents = self._service_keys_to_children_to_parents[ service_key ][ tag ]
for parent in parents:
parent_predicate = ClientSearch.Predicate( HC.PREDICATE_TYPE_PARENT, parent )
results.append( parent_predicate )
return results
def ExpandTags( self, service_key, tags ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
tags_results = set( tags )
for tag in tags:
tags_results.update( self._service_keys_to_children_to_parents[ service_key ][ tag ] )
return tags_results
def GetParents( self, service_key, tag ):
if self._controller.new_options.GetBoolean( 'apply_all_parents_to_all_services' ):
service_key = CC.COMBINED_TAG_SERVICE_KEY
with self._lock:
return self._service_keys_to_children_to_parents[ service_key ][ tag ]
def NotifyNewParents( self ):
with self._lock:
self._dirty = True
if self._refresh_job is not None:
self._refresh_job.Cancel()
self._refresh_job = self._controller.CallLater( 8.0, self.RefreshParentsIfDirty )
def RefreshParentsIfDirty( self ):
with self._lock:
if self._dirty:
self._RefreshParents()
self._dirty = False
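# Caches tag sibling mappings (bad tag -> preferred tag) per service and
# collapses tags to their preferred forms.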
class TagSiblingsManager( object ):
def __init__( self, controller ):
self._controller = controller
self._dirty = False
self._refresh_job = None
self._service_keys_to_siblings = collections.defaultdict( dict )
self._service_keys_to_reverse_lookup = collections.defaultdict( dict )
self._RefreshSiblings()
self._lock = threading.Lock()
self._controller.sub( self, 'NotifyNewSiblings', 'notify_new_siblings_data' )
def _CollapseTags( self, service_key, tags ):
siblings = self._service_keys_to_siblings[ service_key ]
return { siblings[ tag ] if tag in siblings else tag for tag in tags }
def _RefreshSiblings( self ):
self._service_keys_to_siblings = collections.defaultdict( dict )
self._service_keys_to_reverse_lookup = collections.defaultdict( dict )
local_tags_pairs = set()
tag_repo_pairs = set()
service_keys_to_statuses_to_pairs = self._controller.Read( 'tag_siblings' )
for ( service_key, statuses_to_pairs ) in list(service_keys_to_statuses_to_pairs.items()):
all_pairs = statuses_to_pairs[ HC.CONTENT_STATUS_CURRENT ].union( statuses_to_pairs[ HC.CONTENT_STATUS_PENDING ] )
if service_key == CC.LOCAL_TAG_SERVICE_KEY:
local_tags_pairs = set( all_pairs )
else:
tag_repo_pairs.update( all_pairs )
siblings = CollapseTagSiblingPairs( [ all_pairs ] )
self._service_keys_to_siblings[ service_key ] = siblings
reverse_lookup = collections.defaultdict( list )
        for ( bad,
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""PyTorch ADAPTERBERT model. """
import math
import os
import warnings
import numpy as np
from dataclasses import dataclass
from typing import Optional, Tuple
import torch
import torch.utils.checkpoint
import torch.nn.functional as F
from packaging import version
from torch import nn
from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss, KLDivLoss
from torch.distributions.beta import Beta
from ...activations import ACT2FN
from ...file_utils import (
ModelOutput,
add_code_sample_docstrings,
add_start_docstrings,
add_start_docstrings_to_model_forward,
replace_return_docstrings,
)
from ...modeling_outputs import (
BaseModelOutputWithPastAndCrossAttentions,
BaseModelOutputWithPoolingAndCrossAttentions,
CausalLMOutputWithCrossAttentions,
MaskedLMOutput,
MultipleChoiceModelOutput,
NextSentencePredictorOutput,
QuestionAnsweringModelOutput,
SequenceClassifierOutput,
TokenClassifierOutput,
DualPassageEncoderModelOutput,
)
from ...modeling_utils import (
PreTrainedModel,
apply_chunking_to_forward,
find_pruneable_heads_and_indices,
prune_linear_layer,
)
from ...utils import logging
from .configuration_adapterbert import AdapterBertConfig
from ..bert.modeling_bert import BertEmbeddings as AdapterBertEmbeddings
from ..bert.modeling_bert import BertSelfAttention as AdapterBertSelfAttention
from ..bert.modeling_bert import BertIntermediate as AdapterBertIntermediate
from ..bert.modeling_bert import BertPooler as AdapterBertPooler
logger = logging.get_logger(__name__)
_CHECKPOINT_FOR_DOC = "bert-base-uncased"
_CONFIG_FOR_DOC = "AdapterBertConfig"
_TOKENIZER_FOR_DOC = "AdapterBertTokenizer"
ADAPTERBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
"bert-base-uncased",
"bert-large-uncased",
"bert-base-cased",
"bert-large-cased",
"bert-base-multilingual-uncased",
"bert-base-multilingual-cased",
# See all BERT models at https://huggingface.co/models?filter=bert
]
def load_tf_weights_in_adapterbert(model, config, tf_checkpoint_path):
"""Load tf checkpoints in a pytorch model."""
try:
import re
import numpy as np
import tensorflow as tf
except ImportError:
logger.error(
"Loading a TensorFlow model in PyTorch, requires TensorFlow to be installed. Please see "
"https://www.tensorflow.org/install/ for installation instructions."
)
raise
tf_path = os.path.abspath(tf_checkpoint_path)
logger.info(f"Converting TensorFlow checkpoint from {tf_path}")
# Load weights from TF model
init_vars = tf.train.list_variables(tf_path)
names = []
arrays = []
for name, shape in init_vars:
logger.info(f"Loading TF weight {name} with shape {shape}")
array = tf.train.load_variable(tf_path, name)
names.append(name)
arrays.append(array)
for name, array in zip(names, arrays):
name = name.split("/")
# adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v
# which are not required for using pretrained model
if any(
n in ["adam_v", "adam_m", "AdamWeightDecayOptimizer", "AdamWeightDecayOptimizer_1", "global_step"]
for n in name
):
logger.info(f"Skipping {'/'.join(name)}")
continue
pointer = model
for m_name in name:
if re.fullmatch(r"[A-Za-z]+_\d+", m_name):
scope_names = re.split(r"_(\d+)", m_name)
else:
scope_names = [m_name]
if scope_names[0] == "kernel" or scope_names[0] == "gamma":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "output_bias" or scope_names[0] == "beta":
pointer = getattr(pointer, "bias")
elif scope_names[0] == "output_weights":
pointer = getattr(pointer, "weight")
elif scope_names[0] == "squad":
pointer = getattr(pointer, "classifier")
else:
try:
pointer = getattr(pointer, scope_names[0])
except AttributeError:
logger.info(f"Skipping {'/'.join(name)}")
continue
if len(scope_names) >= 2:
num = int(scope_names[1])
pointer = pointer[num]
if m_name[-11:] == "_embeddings":
pointer = getattr(pointer, "weight")
elif m_name == "kernel":
array = np.transpose(array)
try:
assert (
pointer.shape == array.shape
), f"Pointer shape {pointer.shape} and array shape {array.shape} mismatched"
except AssertionError as e:
e.args += (pointer.shape, array.shape)
raise
logger.info(f"Initialize PyTorch weight {name}")
pointer.data = torch.from_numpy(array)
return model
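# Bottleneck adapter in the style of Houlsby et al. (2019): project the
# hidden state down to adapter_projection_size, apply the activation,
# project back up, and add a residual connection to the input.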
class AdapterBertProjection(nn.Module):
def __init__(self, config):
super().__init__()
self.projection_down = nn.Linear(config.hidden_size, config.adapter_projection_size)
if isinstance(config.hidden_act, str):
self.projection_act_fn = ACT2FN[config.hidden_act]
else:
self.projection_act_fn = config.hidden_act
self.projection_up = nn.Linear(config.adapter_projection_size, config.hidden_size)
def forward(self, input_tensor):
hidden_states = self.projection_down(input_tensor)
hidden_states = self.projection_act_fn(hidden_states)
hidden_states = self.projection_up(hidden_states)
return hidden_states + input_tensor
class AdapterBertSelfOutput(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.adapter = AdapterBertProjection(config)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.adapter(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
class AdapterBertAttention(nn.Module):
def __init__(self, config):
super().__init__()
self.self = AdapterBertSelfAttention(config)
self.output = AdapterBertSelfOutput(config)
self.pruned_heads = set()
def prune_heads(self, heads):
if len(heads) == 0:
return
heads, index = find_pruneable_heads_and_indices(
heads, self.self.num_attention_heads, self.self.attention_head_size, self.pruned_heads
)
# Prune linear layers
self.self.query = prune_linear_layer(self.self.query, index)
self.self.key = prune_linear_layer(self.self.key, index)
self.self.value = prune_linear_layer(self.self.value, index)
self.output.dense = prune_linear_layer(self.output.dense, index, dim=1)
# Update hyper params and store pruned heads
self.self.num_attention_heads = self.self.num_attention_heads - len(heads)
self.self.all_head_size = self.self.attention_head_size * self.self.num_attention_heads
self.pruned_heads = self.pruned_heads.union(heads)
def forward(
self,
hidden_states,
attention_mask=None,
head_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_value=None,
output_attentions=False,
):
self_outputs = self.self(
hidden_states,
attention_mask,
head_mask,
encoder_hidden_states,
encoder_attention_mask,
past_key_value,
output_attentions,
)
attention_output = self.output(self_outputs[0], hidden_states)
outputs = (attention_output,) + self_outputs[1:] # add attentions if we output them
return outputs
class AdapterBertOutput(nn.Module):
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.intermediate_size, config.hidden_size)
self.LayerNorm = nn.LayerNorm(config.hidden_size, eps=config.layer_norm_eps)
self.dropout = nn.Dropout(config.hidden_dropout_prob)
self.adapter = AdapterBertProjection(config)
def forward(self, hidden_states, input_tensor):
hidden_states = self.dense(hidden_states)
hidden_states = self.dropout(hidden_states)
hidden_states = self.adapter(hidden_states)
hidden_states = self.LayerNorm(hidden_states + input_tensor)
return hidden_states
class AdapterBertLayer(nn.Module):
def __init__(self, config):
super().__init__()
self.chunk_size_feed_forward = config.chunk_size_feed_forward
self.seq_len_dim = 1
self.attention = AdapterBertAttention(config)
self.is_decoder = config.is_decoder
self.add_cross_attention = config.add_cross_attention
if self.add_cross_attention:
assert self.is_decoder, f"{self} should be used as a decoder model if cross attention is added"
self.crossattention = AdapterBertAttention(config)
self.intermediate = AdapterBertIntermediate(config)
self.output = AdapterBertOutput(config)
def forward(
self,
hidden_states,
attention_mask=None,
head_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_value=None,
output_attentions=False,
):
# decoder uni-directional self-attention cached key/values tuple is at positions 1,2
self_attn_past_key_value = past_key_value[:2] if past_key_value is not None else None
self_attention_outputs = self.attention(
hidden_states,
attention_mask,
head_mask,
output_attentions=output_attentions,
past_key_value=self_attn_past_key_value,
)
attention_output = self_attention_outputs[0]
# if decoder, the last output is tuple of self-attn cache
if self.is_decoder:
outputs = self_attention_outputs[1:-1]
present_key_value = self_attention_outputs[-1]
else:
outputs = self_attention_outputs[1:] # add self attentions if we output attention weights
cross_attn_present_key_value = None
if self.is_decoder and encoder_hidden_states is not None:
assert hasattr(
self, "crossattention"
), f"If `encoder_hidden_states` are passed, {self} has to be instantiated with cross-attention layers by setting `config.add_cross_attention=True`"
# cross_attn cached key/values tuple is at positions 3,4 of past_key_value tuple
cross_attn_past_key_value = past_key_value[-2:] if past_key_value is not None else None
cross_attention_outputs = self.crossattention(
attention_output,
attention_mask,
head_mask,
encoder_hidden_states,
encoder_attention_mask,
cross_attn_past_key_value,
output_attentions,
)
attention_output = cross_attention_outputs[0]
outputs = outputs + cross_attention_outputs[1:-1] # add cross attentions if we output attention weights
# add cross-attn cache to positions 3,4 of present_key_value tuple
cross_attn_present_key_value = cross_attention_outputs[-1]
present_key_value = present_key_value + cross_attn_present_key_value
layer_output = apply_chunking_to_forward(
self.feed_forward_chunk, self.chunk_size_feed_forward, self.seq_len_dim, attention_output
)
outputs = (layer_output,) + outputs
# if decoder, return the attn key/values as the last output
if self.is_decoder:
outputs = outputs + (present_key_value,)
return outputs
def feed_forward_chunk(self, attention_output):
intermediate_output = self.intermediate(attention_output)
layer_output = self.output(intermediate_output, attention_output)
return layer_output
class AdapterBertEncoder(nn.Module):
def __init__(self, config):
super().__init__()
self.config = config
self.layer = nn.ModuleList([AdapterBertLayer(config) for _ in range(config.num_hidden_layers)])
def forward(
self,
hidden_states,
attention_mask=None,
head_mask=None,
encoder_hidden_states=None,
encoder_attention_mask=None,
past_key_values=None,
use_cache=None,
output_attentions=False,
output_hidden_states=False,
return_dict=True,
):
all_hidden_states = () if output_hidden_states else None
all_self_attentions = () if output_attentions else None
all_cross_attentions = () if output_attentions and self.config.add_cross_attention else None
next_decoder_cache = () if use_cache else None
for i, layer_module in enumerate(self.layer):
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
layer_head_mask = head_mask[i] if head_mask is not None else None
past_key_value = past_key_values[i] if past_key_values is not None else None
if getattr(self.config, "gradient_checkpointing", False) and self.training:
if use_cache:
logger.warning(
"`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
"`use_cache=False`..."
)
use_cache = False
def create_custom_forward(module):
def custom_forward(*inputs):
return module(*inputs, past_key_value, output_attentions)
return custom_forward
layer_outputs = torch.utils.checkpoint.checkpoint(
create_custom_forward(layer_module),
hidden_states,
attention_mask,
layer_head_mask,
encoder_hidden_states,
encoder_attention_mask,
)
else:
layer_outputs = layer_module(
hidden_states,
attention_mask,
layer_head_mask,
encoder_hidden_states,
encoder_attention_mask,
past_key_value,
output_attentions,
)
hidden_states = layer_outputs[0]
if use_cache:
next_decoder_cache += (layer_outputs[-1],)
if output_attentions:
all_self_attentions = all_self_attentions + (layer_outputs[1],)
if self.config.add_cross_attention:
all_cross_attentions = all_cross_attentions + (layer_outputs[2],)
if output_hidden_states:
all_hidden_states = all_hidden_states + (hidden_states,)
if not return_dict:
return tuple(
v
for v in [
hidden_states,
next_decoder_cache,
all_hidden_states,
all_self_attentions,
all_cross_attentions,
]
if v is not None
)
return BaseModelOutputWithPastAndCrossAttentions(
last_hidden_state=hidden_states,
past_key_values=next_decoder_cache,
hidden_states=all_hidden_states,
attentions=all_self_attentions,
cross_attentions=all_cross_attentions,
)
class AdapterBertPreTrainedModel(PreTrainedModel):
"""
An abstract class to handle weights initialization and a simple interface for downloading and loading pretrained
models.
"""
config_class = AdapterBertConfig
load_tf_weights = load_tf_weights_in_adapterbert
base_model_prefix = "bert"
_keys_to_ignore_on_load_missing = [r"position_ids"]
def _init_weights(self, module):
"""Initialize the weights"""
if isinstance(module, nn.Linear):
# Slightly different from the TF version which uses truncated_normal for initialization
# cf https://github.com/pytorch/pytorch/pull/5617
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.bias is not None:
module.bias.data.zero_()
elif isinstance(module, nn.Embedding):
module.weight.data.normal_(mean=0.0, std=self.config.initializer_range)
if module.padding_idx is not None:
module.weight.data[module.padding_idx].zero_()
elif isinstance(module, nn.LayerNorm):
module.bias.data.zero_()
module.weight.data.fill_(1.0)
def freeze_weights(self):
"""Freeze non-adapter weights"""
for n, p in self.named_parameters():
if "adapter.projection" not in n:
p.requires_grad = False
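# Usage sketch (an assumption; no concrete model class is shown in this
# excerpt): with a subclass of AdapterBertPreTrainedModel, adapter-only
# fine-tuning would look like
#
#     model = SomeAdapterBertModel.from_pretrained("bert-base-uncased")
#     model.freeze_weights()  # only adapter projections stay trainable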
ADAPTERBERT_START_DOCSTRING = r"""
    This model
<filename>katcp/test/test_kattypes.py
# test_kattypes.py
# -*- coding: utf8 -*-
# vim:fileencoding=utf8 ai ts=4 sts=4 et sw=4
# Copyright 2009 SKA South Africa (http://ska.ac.za/)
# BSD license - see COPYING for details
"""Tests for the kattypes module.
"""
import unittest2 as unittest
import mock
from katcp import Message, FailReply, AsyncReply
from katcp.kattypes import request, inform, return_reply, send_reply, \
Bool, Discrete, Float, Int, Lru, Timestamp, \
Str, Struct, Regex, DiscreteMulti, TimestampOrNow, \
StrictTimestamp, Address
MS_TO_SEC_FAC = 1/1000.
SEC_TO_MS_FAC = 1000
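# Shared harness: each TestType subclass fills self._pack / self._unpack with
# (type instance, input, expected output-or-exception) triples, and the base
# class drives both directions.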
class TestType(unittest.TestCase):
def setUp(self):
self._pack = []
self._unpack = []
def test_pack(self):
for t, value, result in self._pack:
if type(result) is type and issubclass(result, Exception):
self.assertRaises(result, t.pack, value)
else:
self.assertEquals(t.pack(value), result)
def test_unpack(self):
for t, value, result in self._unpack:
if type(result) is type and issubclass(result, Exception):
self.assertRaises(result, t.unpack, value)
else:
self.assertEquals(t.unpack(value), result)
class TestInt(TestType):
def setUp(self):
basic = Int()
default = Int(default=11)
optional = Int(optional=True)
default_optional = Int(default=11, optional=True)
self.minmax = Int(min=5, max=6)
self._pack = [
(basic, 5, "5"),
(basic, -5, "-5"),
(basic, "a", TypeError),
(basic, None, ValueError),
(self.minmax, 5, "5"),
(self.minmax, 6, "6"),
(self.minmax, 4, ValueError),
(self.minmax, 7, ValueError),
(default, None, "11"),
(default_optional, None, "11"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "5", 5),
(basic, "-5", -5),
(basic, "a", ValueError),
(basic, None, ValueError),
(self.minmax, "5", 5),
(self.minmax, "6", 6),
(self.minmax, "4", ValueError),
(self.minmax, "7", ValueError),
(default, None, 11),
(default_optional, None, 11),
(optional, None, None),
]
class TestFloat(TestType):
def setUp(self):
basic = Float()
default = Float(default=11.0)
optional = Float(optional=True)
default_optional = Float(default=11.0, optional=True)
self.minmax = Float(min=5.0, max=6.0)
self._pack = [
(basic, 5.0, "5"),
(basic, -5.0, "-5"),
(basic, "a", TypeError),
(basic, None, ValueError),
(self.minmax, 5.0, "5"),
(self.minmax, 6.0, "6"),
(self.minmax, 4.5, ValueError),
(self.minmax, 6.5, ValueError),
(default, None, "11"),
(default_optional, None, "11"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "5", 5.0),
(basic, "-5", -5.0),
(basic, "a", ValueError),
(basic, None, ValueError),
(self.minmax, "5", 5.0),
(self.minmax, "6", 6.0),
(self.minmax, "4.5", ValueError),
(self.minmax, "6.5", ValueError),
(default, None, 11.0),
(default_optional, None, 11.0),
(optional, None, None),
]
class TestBool(TestType):
def setUp(self):
basic = Bool()
default = Bool(default=True)
optional = Bool(optional=True)
default_optional = Bool(default=True, optional=True)
self._pack = [
(basic, True, "1"),
(basic, False, "0"),
(basic, 1, "1"),
(basic, 0, "0"),
(basic, "a", "1"),
(basic, None, ValueError),
(default, None, "1"),
(default_optional, None, "1"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "1", True),
(basic, "0", False),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, True),
(default_optional, None, True),
(optional, None, None),
]
class TestDiscrete(TestType):
def setUp(self):
basic = Discrete(("VAL1", "VAL2"))
default = Discrete(("VAL1", "VAL2"), default="VAL1")
optional = Discrete(("VAL1", "VAL2"), optional=True)
default_optional = Discrete(("VAL1", "VAL2"), default="VAL1",
optional=True)
case_insensitive = Discrete(("val1", "VAL2"), case_insensitive=True)
self._pack = [
(basic, "VAL1", "VAL1"),
(basic, "VAL2", "VAL2"),
(basic, "a", ValueError),
(basic, "val1", ValueError),
(basic, None, ValueError),
(default, None, "VAL1"),
(default_optional, None, "VAL1"),
(optional, None, ValueError),
(case_insensitive, "VAL1", "VAL1"),
(case_insensitive, "vAl2", "vAl2"),
(case_insensitive, "a", ValueError),
]
self._unpack = [
(basic, "VAL1", "VAL1"),
(basic, "VAL2", "VAL2"),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, "VAL1"),
(default_optional, None, "VAL1"),
(optional, None, None),
(case_insensitive, "val1", "val1"),
(case_insensitive, "vAl2", "vAl2"),
(case_insensitive, "a", ValueError),
]
class TestLru(TestType):
def setUp(self):
basic = Lru()
default = Lru(default=Lru.LRU_NOMINAL)
optional = Lru(optional=True)
default_optional = Lru(default=Lru.LRU_NOMINAL, optional=True)
self._pack = [
(basic, Lru.LRU_NOMINAL, "nominal"),
(basic, Lru.LRU_ERROR, "error"),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, "nominal"),
(default_optional, None, "nominal"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "nominal", Lru.LRU_NOMINAL),
(basic, "error", Lru.LRU_ERROR),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, Lru.LRU_NOMINAL),
(default_optional, None, Lru.LRU_NOMINAL),
(optional, None, None),
]
class TestAddress(TestType):
def setUp(self):
basic = Address()
default = Address(default=("127.0.0.1", None))
optional = Address(optional=True)
default_optional = Address(default=("127.0.0.1", None), optional=True)
self._pack = [
(basic, ("127.0.0.1", None), "127.0.0.1"),
(basic, ("127.0.0.1", 80), "127.0.0.1:80"),
(basic, ("0:0:0:0:0:0:0:1", None), "[0:0:0:0:0:0:0:1]"),
(basic, ("::1", None), "[::1]"),
(basic, ("::FFFF:172.16.31.10", None),
"[::FFFF:204.152.189.116]"),
(basic, ("::1", 80), "[::1]:80"),
(basic, "127.0.0.1", ValueError), # value not a tuple
(default, None, "127.0.0.1"),
(default_optional, None, "127.0.0.1"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "127.0.0.1", ("127.0.0.1", None)),
(basic, "127.0.0.1:80", ("127.0.0.1", 80)),
(basic, "[0:0:0:0:0:0:0:1]", ("0:0:0:0:0:0:0:1", None)),
(basic, "[::1]", ("::1", None)),
(basic, "[::FFFF:204.152.189.116]", ("::FFFF:172.16.31.10",
None)),
(basic, "[::1]:80", ("::1", 80)),
(basic, "127.0.0.1:foo", ValueError),
(basic, None, ValueError),
(default, None, ("127.0.0.1", None)),
(default_optional, None, ("127.0.0.1", None)),
(optional, None, None),
]
class TestTimestamp(TestType):
def setUp(self):
basic = Timestamp()
default = Timestamp(default=1235475793.0324881)
optional = Timestamp(optional=True)
default_optional = Timestamp(default=1235475793.0324881, optional=True)
self._pack = [
(basic, 1235475381.6966901, "1235475381.696690"),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, "1235475793.032488"),
(default_optional, None, "1235475793.032488"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "1235475381.696", 1235475381.6960001),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, 1235475793.0324881),
(default_optional, None, 1235475793.0324881),
(optional, None, None),
]
class TestStrictTimestamp(TestType):
def setUp(self):
basic = StrictTimestamp()
default = StrictTimestamp(default=1235475793.03249)
optional = StrictTimestamp(optional=True)
default_optional = StrictTimestamp(default=1235475793.03249,
optional=True)
self._pack = [
(basic, 1235475381.69669, "1235475381.69669"),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, "1235475793.03249"),
(default_optional, None, "1235475793.03249"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "1235475381.696", 1235475381.6960001),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, 1235475793.03249),
(default_optional, None, 1235475793.03249),
(optional, None, None),
]
class TestTimestampOrNow(TestType):
def setUp(self):
basic = TimestampOrNow()
default = TimestampOrNow(default=1235475793.0324881)
optional = TimestampOrNow(optional=True)
default_optional = TimestampOrNow(default=1235475793.0324881,
optional=True)
default_now = TimestampOrNow(default=TimestampOrNow.NOW)
self._pack = [
(basic, 1235475381.6966901, "1235475381.696690"),
(basic, "a", ValueError),
(basic, TimestampOrNow.NOW, "now"),
(basic, None, ValueError),
(default, None, "1235475793.032488"),
(default, TimestampOrNow.NOW, "now"),
(default_optional, None, "1235475793.032488"),
(optional, None, ValueError),
(default_now, None, "now"),
]
self._unpack = [
(basic, "1235475381.696", 1235475381.6960001),
(basic, "a", ValueError),
(basic, "now", TimestampOrNow.NOW),
(basic, None, ValueError),
(default, None, 1235475793.0324881),
(default, "now", TimestampOrNow.NOW),
(default_optional, None, 1235475793.0324881),
(optional, None, None),
(default_now, None, TimestampOrNow.NOW),
]
class TestStr(TestType):
def setUp(self):
basic = Str()
default = Str(default="something")
optional = Str(optional=True)
default_optional = Str(default="something", optional=True)
self._pack = [
(basic, "adsasdasd", "adsasdasd"),
(basic, None, ValueError),
(default, None, "something"),
(default_optional, None, "something"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "adsasdasd", "adsasdasd"),
(basic, None, ValueError),
(default, None, "something"),
(default_optional, None, "something"),
(optional, None, None),
]
class TestStruct(TestType):
def setUp(self):
basic = Struct(">isf")
default = Struct(">isf", default=(1, "f", 3.4))
optional = Struct(">isf", optional=True)
default_optional = Struct(">isf", default=(1, "f", 3.4), optional=True)
self._pack = [
(basic, (5, "s", 2.5), "\x00\x00\x00\x05s@ \x00\x00"),
(basic, ("s", 5, 2.5), ValueError),
(basic, (5, "s"), ValueError),
(basic, None, ValueError),
(default, None, "\x00\x00\x00\x01f@Y\x99\x9a"),
(default_optional, None, "\x00\x00\x00\x01f@Y\x99\x9a"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "\x00\x00\x00\x05s@ \x00\x00", (5, "s", 2.5)),
(basic, "asdfgasdfas", ValueError),
(basic, None, ValueError),
(default, None, (1, "f", 3.4)),
(default_optional, None, (1, "f", 3.4)),
(optional, None, None),
]
class TestRegex(TestType):
def setUp(self):
basic = Regex("\d\d:\d\d:\d\d")
default = Regex("\d\d:\d\d:\d\d", default="00:00:00")
optional = Regex("\d\d:\d\d:\d\d", optional=True)
default_optional = Regex("\d\d:\d\d:\d\d", default="00:00:00",
optional=True)
self._pack = [
(basic, "12:34:56", "12:34:56"),
(basic, "sdfasdfsadf", ValueError),
(basic, None, ValueError),
(default, None, "00:00:00"),
(default_optional, None, "00:00:00"),
(optional, None, ValueError),
]
self._unpack = [
(basic, "12:34:56", "12:34:56"),
(basic, "sdfasdfsadf", ValueError),
(basic, None, ValueError),
(default, None, "00:00:00"),
(default_optional, None, "00:00:00"),
(optional, None, None),
]
class TestDiscreteMulti(TestType):
def setUp(self):
basic = DiscreteMulti(("VAL1", "VAL2"))
default = DiscreteMulti(("VAL1", "VAL2"), default=["VAL1"])
optional = DiscreteMulti(("VAL1", "VAL2"), optional=True)
default_optional = DiscreteMulti(("VAL1", "VAL2"), default=["VAL1"],
optional=True)
case_insensitive = DiscreteMulti(("val1", "VAL2"),
case_insensitive=True)
self._pack = [
(basic, ["VAL1"], "VAL1"),
(basic, ["VAL2"], "VAL2"),
(basic, ["VAL1", "VAL2"], "VAL1,VAL2"),
(basic, "a", ValueError),
(basic, "VAL1", ValueError),
(basic, ["aaa"], ValueError),
(basic, ["val1"], ValueError),
(basic, ["VAL1", "val2"], ValueError),
(basic, ["VAL1", "aaa"], ValueError),
(basic, None, ValueError),
(default, None, "VAL1"),
(default_optional, None, "VAL1"),
(optional, None, ValueError),
(case_insensitive, ["VAL1"], "VAL1"),
(case_insensitive, ["vAl2"], "vAl2"),
(case_insensitive, ["VAL1", "val2"], "VAL1,val2"),
(case_insensitive, ["aaa"], ValueError),
]
self._unpack = [
(basic, "VAL1", ["VAL1"]),
(basic, "VAL2", ["VAL2"]),
(basic, "VAL1,VAL2", ["VAL1", "VAL2"]),
(basic, "all", ["VAL1", "VAL2"]),
(basic, "VAL1,aaa", ValueError),
(basic, "VAL1,val2", ValueError),
(basic, "a", ValueError),
(basic, None, ValueError),
(default, None, ["VAL1"]),
(default_optional, None, ["VAL1"]),
(optional, None, None),
(case_insensitive, "val1", ["val1"]),
(case_insensitive, "vAl2", ["vAl2"]),
(case_insensitive, "VAL1,val2", ["VAL1", "val2"]),
(case_insensitive, "VAL1,aaa", ValueError),
(case_insensitive, "a", ValueError),
]
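# Exercises the @request / @return_reply / @send_reply decorators on a fake
# device, including failure replies and asynchronous replies.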
class TestDevice(object):
def __init__(self):
self.sent_messages = []
@request(Int(min=1, max=10), Discrete(("on", "off")), Bool())
@return_reply(Int(min=1, max=10), Discrete(("on", "off")), Bool())
def request_one(self, req, i, d, b):
if i == 3:
return ("fail", "I failed!")
if i == 5:
return ("bananas", "This should never be sent")
if i == 6:
return ("ok", i, d, b, "extra parameter")
if i == 9:
self.finish_request_one(req, i, d, b)
raise AsyncReply()
return ("ok", i, d, b)
@send_reply(Int(min=1, max=10), Discrete(("on", "off")), Bool())
def finish_request_one(self, req, i, d, b):
return (req, "ok", i, d, b)
    def reply(self, req, msg,
<reponame>VladimirZubavlenko/ikaf42-app
import logging
from django.core.mail import send_mail
from django.db.models import Max
from django.http import HttpResponse
from minio.error import (ResponseError)
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
import const as CONST
from researchWork.models import ResearchPaper, Feed, ResearchStatus
from researchWork.serializers import ResearchPaperSerializer, NewWorkSerializer, \
FeedReturnSerializer, FeedReturnSerializer2, WorkUpdate, WorkInfoSerializer, AllStatesSerializer
from user.models import User
# Create your views here.
logger = logging.getLogger(__name__)
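# Role-gated API views for research papers: students see their own papers,
# advisers see the papers they supervise, and department staff see everything.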
class PapersList(APIView):
permission_classes = [IsAuthenticated]
serializer_class = ResearchPaperSerializer
def get(self, request):
logger.warning('IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(
request.query_params))
if request.user.role.pk == CONST.STUDENT_ROLE:
obj = ResearchPaper.objects.filter(author_id=request.user.id, hidden=False).order_by('createdAt')
response = self.serializer_class(obj, many=True)
return Response({'type': 'success', 'message': response.data})
if request.user.role.pk == CONST.TEACHER_ROLE:
obj = ResearchPaper.objects.filter(advisory_id=request.user.id).order_by('createdAt')
response = self.serializer_class(obj, many=True)
return Response({'type': 'success', 'message': response.data})
if request.user.role.pk == CONST.DEPARTMENT_ROLE:
obj = ResearchPaper.objects.all().order_by('createdAt')
response = self.serializer_class(obj, many=True)
return Response({'type': 'success', 'message': response.data})
        return Response({'type': 'error', 'message': 'Authentication error'})
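
# Returns the full details of a single paper; access is limited to the paper's
# author, advisor, consultant and department staff.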
class WorkInfo(APIView):
permission_classes = [IsAuthenticated]
serializer_class = WorkInfoSerializer
def get(self, request):
logger.warning('IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(
request.query_params))
workObj = ResearchPaper.objects.get(pk=request.query_params.get('id'))
if request.user.id == workObj.author_id_id or \
request.user.id == workObj.advisory_id_id or \
request.user.id == workObj.consultant_id_id or \
request.user.role.pk == CONST.DEPARTMENT_ROLE:
advisoryObj = User.objects.get(pk=workObj.advisory_id_id)
authorObj = User.objects.get(pk=workObj.author_id_id)
try:
consObj = User.objects.get(pk=workObj.consultant_id_id)
            except User.DoesNotExist:
                # A paper may have no consultant; fall back to an empty stub.
                consObj = User()
                consObj.lastName = ''
                consObj.firstName = ''
                consObj.middleName = ''
response = {'status': workObj.researchStatus_id.pk, 'description': workObj.description, 'theme': workObj.theme,
'objective': workObj.objective,
'results': workObj.results, 'content': workObj.content, 'sources': workObj.sources,
'reportFile': str(workObj.reportFile),
'presentationFile': str(workObj.presentationFile),
'isNew': workObj.isNew,
'review': workObj.review,
'authorEmail': authorObj.email,
'authorIsActive': authorObj.is_active,
'authorLastName': authorObj.lastName,
'authorFirstName': authorObj.firstName,
'authorMiddleName': authorObj.middleName,
'authorPhone': authorObj.phone,
'authorGroup': authorObj.studyGroup.name,
'advisoryID': advisoryObj.pk,
'advisoryEmail': advisoryObj.email,
'advisoryIsActive': advisoryObj.is_active,
'advisoryLastName': advisoryObj.lastName,
'advisoryFirstName': advisoryObj.firstName,
'advisoryMiddleName': advisoryObj.middleName,
'advisoryPhone': advisoryObj.phone,
'advisoryWorkPlace': advisoryObj.workPlace,
'advisoryPosition': advisoryObj.position,
'advisoryAcademicRank': advisoryObj.academicRank,
'advisoryAcademicDegree': advisoryObj.academicDegree,
'consultantLastName': consObj.lastName,
'consultantFirstName': consObj.firstName,
'consultantMiddleName': consObj.middleName
}
return Response({'type': 'success', 'message': response})
else:
            return Response({'type': 'error', 'message': 'This is not your paper!'}, status=status.HTTP_403_FORBIDDEN)
"""
serializer = self.serializer_class(workObj)
return Response({'type': 'success', 'message': [serializer.data]})"""
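
# Creates a new paper for the requesting student, moves it to status 1 (draft)
# and notifies the author by email.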
class NewWork(APIView):
permission_classes = [IsAuthenticated]
serializer_class = NewWorkSerializer
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
if request.user.role.pk == CONST.STUDENT_ROLE:
json = request.data
json.update({'author_id': str(request.user.pk)})
serializer = self.serializer_class(data=json)
serializer.is_valid(raise_exception=True)
            obj = serializer.save()  # use the saved instance instead of re-querying by latest id
objStatus = ResearchStatus.objects.get(id=1)
            Feed.objects.create(researchPaper_id=obj, user=request.user, text='Assignment created')
obj.researchStatus_id = objStatus
obj.save()
            logger.debug('Notifying author at %s', obj.author_id.email)
            send_mail('Paper status changed',
                      'The status of paper ' + obj.description + ' has been changed to ' + objStatus.name + '.',
'<EMAIL>',
[obj.author_id.email],
fail_silently=False)
return Response(
{
                    'type': 'success', 'message': 'Assignment created'  # + serializer.data.get('theme', None)
},
)
else:
            return Response({'type': 'error', 'message': 'You are not a student.'}, status=status.HTTP_403_FORBIDDEN)
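
# Returns the activity feed for a paper to its author, advisor or the department.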
class FeedReturn(APIView):
permission_classes = [IsAuthenticated]
serializer_class = FeedReturnSerializer
def get(self, request):
logger.warning('IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(
request.query_params))
obj = ResearchPaper.objects.get(pk=request.query_params.get('id'))
if obj.author_id.pk == request.user.pk or obj.advisory_id.pk == request.user.pk or request.user.role.pk == CONST.DEPARTMENT_ROLE:
feeds = Feed.objects.filter(researchPaper_id=request.query_params.get('id'))
serializer = self.serializer_class(feeds, many=True)
# response = self.checkObjects(serializer, request.user.id, request.user.role.id, len(serializer.data))
return Response({'type': 'success', 'message': serializer.data})
else:
            return Response({'type': 'error', 'message': 'This is not your paper.'}, status=status.HTTP_403_FORBIDDEN)
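
# Adds a comment to a paper's feed on behalf of the requesting user.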
class FeedWorkComment(APIView):
permission_classes = [IsAuthenticated]
serializer_class = FeedReturnSerializer2
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
obj = ResearchPaper.objects.get(pk=request.data['researchPaper_id'])
if obj.author_id.pk == request.user.pk or obj.advisory_id.pk == request.user.pk or request.user.role.pk == CONST.DEPARTMENT_ROLE:
json = request.data
json.update({'user': str(request.user.id)})
serializer = self.serializer_class(data=json)
serializer.is_valid(raise_exception=True)
serializer.save()
return Response(
{
'type': 'success',
                    'message': 'Comment "' + serializer.data.get('text', '') + '" created by user ' + str(
                        serializer.data.get('user', None)) + ' at ' + str(serializer.data.get('date', None))
}
)
else:
            return Response({'type': 'error', 'message': 'You do not have access to this paper'},
                            status=status.HTTP_403_FORBIDDEN)
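
# Soft-deletes a draft (status 1) paper by setting its hidden flag; only the
# paper's author may delete it.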
class DeleteWork(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
if request.user.role.pk == CONST.STUDENT_ROLE:
obj = ResearchPaper.objects.get(pk=request.data['id'])
if obj.author_id.pk == request.user.pk and obj.researchStatus_id.pk == 1:
obj.hidden = True
obj.save()
return Response(
{
                        'type': 'success', 'message': 'Paper ' + str(request.data['id']) + ' deleted'
}
)
else:
                return Response({'type': 'error', 'message': 'The paper cannot be deleted'}, status=status.HTTP_400_BAD_REQUEST)
else:
            return Response({'type': 'error', 'message': 'Are you really a student?'}, status=status.HTTP_403_FORBIDDEN)
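
# Lets the author update a draft assignment and submit it for advisor review
# (status 1 -> 2), notifying the advisor by email.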
class UpdateWork(APIView):
permission_classes = [IsAuthenticated]
serializer_class = WorkUpdate
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
if request.user.role.pk == CONST.STUDENT_ROLE:
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
if serializer.data['theme'] == '' or serializer.data['objective'] == '' or serializer.data['content'] == '':
                return Response({'type': 'error', 'message': 'Theme, objective and content must be filled in!'})
else:
obj = ResearchPaper.objects.get(pk=request.data['pk'])
if obj.researchStatus_id.pk == 1:
serializer.update(obj, serializer.validated_data)
objStatus = ResearchStatus.objects.get(id=2)
obj.researchStatus_id = objStatus
obj.isNew = False
                    Feed.objects.create(researchPaper_id=obj, user=request.user, text='Assignment sent to the research advisor for review')
obj.save()
                    send_mail('Please confirm the research assignment!',
                              'The research assignment of student ' + obj.author_id.lastName + ' ' + obj.author_id.firstName +
                              ' on the topic "' + obj.theme + '" is awaiting your confirmation.',
'<EMAIL>',
[obj.advisory_id.email],
fail_silently=False)
return Response(
{
                            'type': 'success', 'message': 'Assignment sent to the research advisor for review'
},
status=status.HTTP_200_OK,
)
else:
                    return Response({'type': 'error', 'message': 'The assignment information cannot be changed'})
else:
            return Response({'type': 'error', 'message': 'Are you really a student?'}, status=status.HTTP_403_FORBIDDEN)
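
# Advances a paper to the next workflow state when the advisor or the
# department approves it (see the status map at the top of the file), and
# emails the interested parties.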
class WorkPush(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
obj = ResearchPaper.objects.get(id=request.data['id'])
state = obj.researchStatus_id.pk
user_id = request.user.pk
role = request.user.role.pk
        if state == 2 and user_id == obj.advisory_id.pk:
            return self.saveState(obj, 3, request, 'Assignment approved by the research advisor')
        elif state == 3 and role == CONST.DEPARTMENT_ROLE:
            return self.saveState(obj, 4, request, 'Assignment approved by the department. Report upload expected')
        elif state == 5 and user_id == obj.advisory_id.pk:
            return self.saveState(obj, 6, request, 'Report approved by the research advisor')
        elif state == 6 and role == CONST.DEPARTMENT_ROLE:
            return self.saveState(obj, 7, request, 'Report approved by the department. The student is admitted to the defence; a defence presentation is expected')
        else:
            return Response({'type': 'error', 'message': 'The status cannot be changed'})
def saveState(self, obj, pk_status, request, msg):
newState = ResearchStatus.objects.get(id=pk_status)
obj.researchStatus_id = newState
Feed.objects.create(researchPaper_id=obj, user=request.user, text=msg)
obj.save()
self.sendEmail(obj, [obj.author_id.email])
        if pk_status in (3, 6):
            # `newState` is a ResearchStatus instance, so the original comparison
            # with an int was always False; compare the target pk instead, and
            # pass email addresses (not User objects) to send_mail.
            receivers = [u.email for u in User.objects.filter(role_id=CONST.DEPARTMENT_ROLE)]
            self.sendDepEmail(obj, receivers)
return Response(
{'type': 'success', 'message': str(msg)})
def sendEmail(self, obj, receivers):
        send_mail('Paper status changed',
                  'The status of paper ' + obj.theme + ' has been changed to ' + obj.researchStatus_id.name + '.',
'<EMAIL>',
receivers,
fail_silently=False)
def sendDepEmail(self, obj, receivers):
        send_mail('Paper status changed',
                  'The paper "' + obj.theme + '" by student ' + obj.author_id.lastName + ' ' + obj.author_id.firstName + ' has arrived for review.',
'<EMAIL>',
receivers,
fail_silently=False)
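
# Returns a paper to an earlier workflow state when the advisor or the
# department sends it back for revision.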
class WorkPop(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
obj = ResearchPaper.objects.get(id=request.data['id'])
state = obj.researchStatus_id.pk
user_id = request.user.pk
role = request.user.role.pk
        if state == 2 and user_id == obj.advisory_id.pk:
            return self.saveState(obj, 1, request, 'Assignment returned for revision by the research advisor')
        elif state == 3 and role == CONST.DEPARTMENT_ROLE:
            return self.saveState(obj, 1, request, 'Assignment returned for revision by the department')
        elif state == 5 and user_id == obj.advisory_id.pk:
            return self.saveState(obj, 4, request, 'Report returned for revision by the research advisor')
        elif state == 6 and role == CONST.DEPARTMENT_ROLE:
            return self.saveState(obj, 4, request, 'Report returned for revision by the department')
        else:
            return Response({'type': 'error', 'message': 'The status cannot be changed'})
def saveState(self, obj, pk_status, request, msg):
newState = ResearchStatus.objects.get(id=pk_status)
obj.researchStatus_id = newState
Feed.objects.create(researchPaper_id=obj, user=request.user, text=msg)
obj.save()
self.sendEmail(obj, [obj.author_id.email])
return Response(
{'type': 'success', 'message': str(msg)})
def sendEmail(self, obj, receivers):
        send_mail('Paper status changed',
                  'The status of paper ' + obj.description + ' has been changed to ' + obj.researchStatus_id.name + '.',
'<EMAIL>',
receivers,
fail_silently=False)
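
# Lets the author pull an assignment that is under review (status 2-4) back to
# draft for further changes.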
class StudentWorkReturn(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
if request.user.role.pk == CONST.STUDENT_ROLE:
obj = ResearchPaper.objects.get(id=request.data['id'])
if request.user.id == obj.author_id.id:
state = obj.researchStatus_id.pk
                if state in (2, 3, 4):
                    newState = ResearchStatus.objects.get(id=1)
                    obj.researchStatus_id = newState
                    Feed.objects.create(researchPaper_id=obj, user=request.user, text='The student returned the assignment for revision')
                    obj.save()
                    return Response({'type': 'success',
                                     'message': 'The paper has been returned for revision'})
                else:
                    return Response({'type': 'error', 'message': 'The status cannot be changed'})
            else:
                return Response({'type': 'error', 'message': 'This is not your paper.'},
                                status=status.HTTP_403_FORBIDDEN)
        else:
            return Response({'type': 'error', 'message': 'You are not a student.'}, status=status.HTTP_403_FORBIDDEN)
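
# Presumably the report-stage counterpart of StudentWorkReturn.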
class StudentReportReturn(APIView):
permission_classes = [IsAuthenticated]
def post(self, request):
logger.warning(
'IP: ' + CONST.get_client_ip(request) + ' USER:' + str(request.user) + ' REQUEST:' + str(request.data))
if request.user.role.pk == CONST.STUDENT_ROLE: