hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c3822b069ee64df66a75ee6dfa970295d128aba | 23,556 | py | Python | bl3save/cli_prof_edit.py | DataCluster0/wonderlands-cli-saveedit | 91c7cf30289d62b5d51963da39f80d85c8cb97c5 | [
"Zlib"
] | 78 | 2020-04-05T19:49:31.000Z | 2022-03-09T10:01:40.000Z | bl3save/cli_prof_edit.py | DataCluster0/wonderlands-cli-saveedit | 91c7cf30289d62b5d51963da39f80d85c8cb97c5 | [
"Zlib"
] | 24 | 2020-04-10T16:22:09.000Z | 2022-03-26T11:46:55.000Z | bl3save/cli_prof_edit.py | DataCluster0/wonderlands-cli-saveedit | 91c7cf30289d62b5d51963da39f80d85c8cb97c5 | [
"Zlib"
] | 19 | 2020-04-10T17:32:56.000Z | 2022-03-27T22:23:05.000Z | #!/usr/bin/env python3
# vim: set expandtab tabstop=4 shiftwidth=4:
# Copyright (c) 2020-2021 CJ Kucera (cj@apocalyptech.com)
#
# This software is provided 'as-is', without any express or implied warranty.
# In no event will the authors be held liable for any damages arising from
# the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software in a
# product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
#
# 3. This notice may not be removed or altered from any source distribution.
import os
import sys
import bl3save
import argparse
from . import cli_common
from bl3save.bl3profile import BL3Profile
def main():
    """Command-line entry point for the BL3 CLI profile editor.

    Parses arguments, loads the profile named by ``input_filename``,
    applies any requested edits (key counts, Guardian Rank tweaks,
    Borderlands Science progression, cosmetic unlocks, item import and
    item level/Mayhem adjustments), and writes the result to
    ``output_filename`` in the format chosen by -o/--output
    (profile, protobuf, json, or items).
    """

    # Set up args
    parser = argparse.ArgumentParser(
            description='Borderlands 3 CLI Profile Editor v{} (PC Only)'.format(bl3save.__version__),
            formatter_class=argparse.ArgumentDefaultsHelpFormatter,
            epilog="""
The default output type of "profile" will output theoretically-valid
profile which can be loaded into BL3. The output type "protobuf"
will save out the extracted, decrypted protobufs. The output
type "json" will output a JSON-encoded version of the protobufs
in question. The output type "items" will output a text file
containing base64-encoded representations of items in the user's
bank. These can be read back in using the -i/--import-items
option. Note that these are NOT the same as the item strings used
by the BL3 Memory Editor.
"""
            )

    parser.add_argument('-V', '--version',
            action='version',
            version='BL3 CLI SaveEdit v{}'.format(bl3save.__version__),
            )

    parser.add_argument('-o', '--output',
            choices=['profile', 'protobuf', 'json', 'items'],
            default='profile',
            help='Output file format',
            )

    parser.add_argument('--csv',
            action='store_true',
            help='When importing or exporting items, use CSV files',
            )

    parser.add_argument('-f', '--force',
            action='store_true',
            help='Force output file to overwrite',
            )

    parser.add_argument('-q', '--quiet',
            action='store_true',
            help='Supress all non-essential output')

    # Now the actual arguments

    parser.add_argument('--golden-keys',
            dest='golden_keys',
            type=int,
            help='Number of Golden Keys in the profile',
            )

    parser.add_argument('--diamond-keys',
            dest='diamond_keys',
            type=int,
            help='Number of Diamond Keys in the profile',
            )

    parser.add_argument('--vaultcard1-keys',
            dest='vaultcard1_keys',
            type=int,
            help='Number of Vault Card 1 Keys in the profile',
            )

    parser.add_argument('--vaultcard1-chests',
            dest='vaultcard1_chests',
            type=int,
            help='Number of Vault Card 1 Chests available in the profile',
            )

    parser.add_argument('--vaultcard2-keys',
            dest='vaultcard2_keys',
            type=int,
            help='Number of Vault Card 2 Keys in the profile',
            )

    parser.add_argument('--vaultcard2-chests',
            dest='vaultcard2_chests',
            type=int,
            help='Number of Vault Card 2 Chests available in the profile',
            )

    parser.add_argument('--vaultcard3-keys',
            dest='vaultcard3_keys',
            type=int,
            help='Number of Vault Card 3 Keys in the profile',
            )

    parser.add_argument('--vaultcard3-chests',
            dest='vaultcard3_chests',
            type=int,
            help='Number of Vault Card 3 Chests available in the profile',
            )

    # Arguably we could be using a mutually-exclusive group for many of these
    # GR options, but I can see some potential value in specifying more than
    # one, so I'm not bothering.

    parser.add_argument('--zero-guardian-rank',
            dest='zero_guardian_rank',
            action='store_true',
            help='Zero out profile Guardian Rank',
            )

    parser.add_argument('--min-guardian-rank',
            dest='min_guardian_rank',
            action='store_true',
            help='Set Guardian Rank to minimum required to prevent overwriting by saves',
            )

    parser.add_argument('--guardian-rank-rewards',
            dest='guardian_rank_rewards',
            type=int,
            help='Set Guardian Rank rewards to the specified number of tokens each',
            )

    parser.add_argument('--guardian-rank-tokens',
            dest='guardian_rank_tokens',
            type=int,
            help="Number of available Guardian Rank tokens",
            )

    parser.add_argument('--reset-borderlands-science',
            dest='reset_borderlands_science',
            action='store_true',
            help='Reset Borderlands Science progression',
            )

    parser.add_argument('--max-borderlands-science',
            dest='max_borderlands_science',
            action='store_true',
            help='Maximize Borderlands Science progression, unlocking True Tannis',
            )

    parser.add_argument('--remove-borderlands-science-boosts',
            dest='remove_borderlands_science_boosts',
            action='store_true',
            help='Remove the currently active borderlands science boost',
            )

    parser.add_argument('--borderlands-science-tokens',
            dest='borderlands_science_tokens',
            type=int,
            help="Number of available Borderlands Science tokens",
            )

    # --item-levels-max and --item-levels are contradictory, hence the group.
    itemlevelgroup = parser.add_mutually_exclusive_group()

    itemlevelgroup.add_argument('--item-levels-max',
            dest='item_levels_max',
            action='store_true',
            help='Set all bank items to max level')

    itemlevelgroup.add_argument('--item-levels',
            dest='item_levels',
            type=int,
            help='Set all bank items to the specified level')

    # Likewise for the two Mayhem-level options.
    itemmayhemgroup = parser.add_mutually_exclusive_group()

    itemmayhemgroup.add_argument('--item-mayhem-max',
            dest='item_mayhem_max',
            action='store_true',
            help='Set all bank items to the maximum Mayhem level ({})'.format(bl3save.mayhem_max))

    itemmayhemgroup.add_argument('--item-mayhem-levels',
            dest='item_mayhem_levels',
            type=int,
            choices=range(bl3save.mayhem_max+1),
            help='Set all bank items to the specified Mayhem level (0 to remove)')

    parser.add_argument('-i', '--import-items',
            dest='import_items',
            type=str,
            help='Import items from file',
            )

    parser.add_argument('--allow-fabricator',
            dest='allow_fabricator',
            action='store_true',
            help='Allow importing Fabricator when importing items from file',
            )

    parser.add_argument('--clear-customizations',
            dest='clear_customizations',
            action='store_true',
            help='Remove all unlocked customizations',
            )

    parser.add_argument('--alpha',
            dest='alpha',
            action='store_true',
            help='Alphabetize unlocked room decorations, trinkets, and weapon skins',
            )

    # Valid targets for --unlock; 'customizations' expands to the cosmetic
    # subset below, and 'all' expands to everything.
    unlock_choices = [
            'lostloot', 'bank',
            'skins', 'heads',
            'echothemes', 'emotes', 'decos',
            'weaponskins', 'trinkets',
            'customizations',
            ]
    parser.add_argument('--unlock',
            action=cli_common.DictAction,
            choices=unlock_choices + ['all'],
            default={},
            help='Game features to unlock',
            )

    # Positional args
    parser.add_argument('input_filename',
            help='Input filename',
            )
    parser.add_argument('output_filename',
            help='Output filename',
            )

    # Parse args
    args = parser.parse_args()

    # Expand any of our "all" unlock actions
    if 'all' in args.unlock:
        args.unlock = {k: True for k in unlock_choices}
    elif 'customizations' in args.unlock:
        # 'customizations' is shorthand for all the cosmetic unlocks
        args.unlock['skins'] = True
        args.unlock['heads'] = True
        args.unlock['echothemes'] = True
        args.unlock['emotes'] = True
        args.unlock['decos'] = True
        args.unlock['weaponskins'] = True
        args.unlock['trinkets'] = True

    # Set max item level arg
    if args.item_levels_max:
        args.item_levels = bl3save.max_level

    # Set max mayhem arg
    if args.item_mayhem_max:
        args.item_mayhem_levels = bl3save.mayhem_max

    # Check key counts; don't let them be below zero
    if args.golden_keys is not None and args.golden_keys < 0:
        raise argparse.ArgumentTypeError('Golden keys cannot be negative')
    if args.diamond_keys is not None and args.diamond_keys < 0:
        raise argparse.ArgumentTypeError('Diamond keys cannot be negative')
    if args.vaultcard1_keys is not None and args.vaultcard1_keys < 0:
        raise argparse.ArgumentTypeError('Vault Card 1 keys cannot be negative')
    if args.vaultcard1_chests is not None and args.vaultcard1_chests < 0:
        raise argparse.ArgumentTypeError('Vault Card 1 chests cannot be negative')
    if args.vaultcard2_keys is not None and args.vaultcard2_keys < 0:
        raise argparse.ArgumentTypeError('Vault Card 2 keys cannot be negative')
    if args.vaultcard2_chests is not None and args.vaultcard2_chests < 0:
        raise argparse.ArgumentTypeError('Vault Card 2 chests cannot be negative')
    if args.vaultcard3_keys is not None and args.vaultcard3_keys < 0:
        raise argparse.ArgumentTypeError('Vault Card 3 keys cannot be negative')
    if args.vaultcard3_chests is not None and args.vaultcard3_chests < 0:
        raise argparse.ArgumentTypeError('Vault Card 3 chests cannot be negative')

    # Check item level. The max storeable in the serial number is 127, but the
    # effective limit in-game is 100, thanks to MaxGameStage attributes. We
    # could use `bl3save.max_level` here, too, of course, but in the event that
    # I don't get this updated in a timely fashion, having it higher would let
    # this util potentially continue to be able to level up gear.
    if args.item_levels:
        if args.item_levels < 1 or args.item_levels > 100:
            raise argparse.ArgumentTypeError('Valid item level range is 1 through 100')
        if args.item_levels > bl3save.max_level:
            print('WARNING: Setting item levels to {}, when {} is the currently-known max'.format(
                args.item_levels,
                bl3save.max_level,
                ))

    # Check for overwrite warnings; interactive y/N prompt unless -f/--force.
    if os.path.exists(args.output_filename) and not args.force:
        if args.output_filename == args.input_filename:
            confirm_msg = 'Really overwrite {} with specified changes (no backup will be made)'.format(args.output_filename)
        else:
            confirm_msg = '{} already exists. Overwrite'.format(args.output_filename)
        sys.stdout.write('WARNING: {} [y/N]? '.format(confirm_msg))
        sys.stdout.flush()
        response = sys.stdin.readline().strip().lower()
        if len(response) == 0 or response[0] != 'y':
            print('Aborting!')
            sys.exit(1)
        print('')

    # Now load the profile
    if not args.quiet:
        print('Loading {}'.format(args.input_filename))
    profile = BL3Profile(args.input_filename)
    if not args.quiet:
        print('')

    # Check to see if we have any changes to make
    have_changes = any([
        args.golden_keys is not None,
        args.diamond_keys is not None,
        args.vaultcard1_keys is not None,
        args.vaultcard1_chests is not None,
        args.vaultcard2_keys is not None,
        args.vaultcard2_chests is not None,
        args.vaultcard3_keys is not None,
        args.vaultcard3_chests is not None,
        args.zero_guardian_rank,
        args.min_guardian_rank,
        args.guardian_rank_rewards is not None,
        args.guardian_rank_tokens is not None,
        args.reset_borderlands_science,
        args.max_borderlands_science,
        args.remove_borderlands_science_boosts,
        args.borderlands_science_tokens is not None,
        len(args.unlock) > 0,
        args.import_items,
        args.item_levels,
        args.clear_customizations,
        args.alpha,
        args.item_mayhem_levels is not None,
        ])

    # Alert about Guardian Rank stuff -- set True below by any edit which
    # touches profile Guardian Rank, so we can print a reminder at the end.
    guardian_rank_alert = False

    # Make changes
    if have_changes:

        if not args.quiet:
            print('Making requested changes...')
            print('')

        # Golden Keys
        if args.golden_keys is not None:
            if not args.quiet:
                print(' - Setting Golden Key count to {}'.format(args.golden_keys))
            profile.set_golden_keys(args.golden_keys)

        # Diamond Keys
        if args.diamond_keys is not None:
            if not args.quiet:
                print(' - Setting Diamond Key count to {}'.format(args.diamond_keys))
            profile.set_diamond_keys(args.diamond_keys)

        # Vault Card 1 Keys
        if args.vaultcard1_keys is not None:
            if not args.quiet:
                print(' - Setting Vault Card 1 Key count to {}'.format(args.vaultcard1_keys))
            profile.set_vaultcard1_keys(args.vaultcard1_keys)

        # Vault Card 1 Chests
        if args.vaultcard1_chests is not None:
            if not args.quiet:
                print(' - Setting Vault Card 1 Chest count to {}'.format(args.vaultcard1_chests))
            profile.set_vaultcard1_chests(args.vaultcard1_chests)

        # Vault Card 2 Keys
        if args.vaultcard2_keys is not None:
            if not args.quiet:
                print(' - Setting Vault Card 2 Key count to {}'.format(args.vaultcard2_keys))
            profile.set_vaultcard2_keys(args.vaultcard2_keys)

        # Vault Card 2 Chests
        if args.vaultcard2_chests is not None:
            if not args.quiet:
                print(' - Setting Vault Card 2 Chest count to {}'.format(args.vaultcard2_chests))
            profile.set_vaultcard2_chests(args.vaultcard2_chests)

        # Vault Card 3 Keys
        if args.vaultcard3_keys is not None:
            if not args.quiet:
                print(' - Setting Vault Card 3 Key count to {}'.format(args.vaultcard3_keys))
            profile.set_vaultcard3_keys(args.vaultcard3_keys)

        # Vault Card 3 Chests
        if args.vaultcard3_chests is not None:
            if not args.quiet:
                print(' - Setting Vault Card 3 Chest count to {}'.format(args.vaultcard3_chests))
            profile.set_vaultcard3_chests(args.vaultcard3_chests)

        # Zeroing Guardian Rank
        if args.zero_guardian_rank:
            if not args.quiet:
                print(' - Zeroing Guardian Rank')
                # Only warn when zeroing is the *only* GR edit requested
                if not args.min_guardian_rank \
                        and args.guardian_rank_rewards is None \
                        and args.guardian_rank_tokens is None:
                    print('   NOTE: A profile with a zeroed Guardian Rank will probably have its')
                    print('   Guardian Rank info populated from the first savegame loaded by the game')
            profile.zero_guardian_rank()

        # Setting Guardian rank to Minimum
        if args.min_guardian_rank:
            if not args.quiet:
                print(' - Setting Guardian Rank to minimum (to prevent overwriting by savefiles)')
            new_gr = profile.min_guardian_rank()
            if new_gr is not None and not args.quiet:
                print('   - Guardian Rank set to {}'.format(new_gr))
            guardian_rank_alert = True

        # Setting arbitrary Guardian Rank rewards
        if args.guardian_rank_rewards is not None:
            if not args.quiet:
                if args.guardian_rank_rewards == 1:
                    plural = ''
                else:
                    plural = 's'
                print(' - Setting Guardian Rank rewards to {} point{}'.format(args.guardian_rank_rewards, plural))
            new_gr = profile.set_guardian_rank_reward_levels(args.guardian_rank_rewards, force=True)
            if new_gr is not None and not args.quiet:
                print('   - Also set Guardian Rank level to {}'.format(new_gr))
            guardian_rank_alert = True

        # Setting Guardian Rank tokens
        if args.guardian_rank_tokens is not None:
            if not args.quiet:
                print(' - Setting available Guardian Rank tokens to {}'.format(args.guardian_rank_tokens))
            new_gr = profile.set_guardian_rank_tokens(args.guardian_rank_tokens)
            if new_gr is not None and not args.quiet:
                print('   - Also set Guardian Rank level to {}'.format(new_gr))
            guardian_rank_alert = True

        # Reset Borderlands Science progression
        if args.reset_borderlands_science:
            if not args.quiet:
                print(" - Resetting Borderlands Science progression")
            profile.reset_borderlands_science()

        # Maximize Borderlands Science progression
        if args.max_borderlands_science:
            if not args.quiet:
                print(" - Maximizing Borderlands Science progression")
            profile.max_borderlands_science()

        # Removing active Borderlands Science boosts
        if args.remove_borderlands_science_boosts:
            if not args.quiet:
                print(" - Removing active Borderlands Science boosts")
            profile.remove_borderlands_science_boosts()

        # Setting Borderlands Science tokens
        if args.borderlands_science_tokens is not None:
            if not args.quiet:
                print(" - Setting available Borderlands Science tokens to {}".format(args.borderlands_science_tokens))
            profile.set_borderlands_science_tokens(args.borderlands_science_tokens)

        # Clear Customizations (do this *before* explicit customization unlocks)
        if args.clear_customizations:
            if not args.quiet:
                print(' - Clearing all customizations')
            profile.clear_all_customizations()

        # Unlocks
        if len(args.unlock) > 0:
            if not args.quiet:
                print(' - Processing Unlocks:')

            # Lost Loot
            if 'lostloot' in args.unlock:
                if not args.quiet:
                    print('   - Lost Loot SDUs')
                profile.set_max_sdus([bl3save.PSDU_LOSTLOOT])

            # Bank
            if 'bank' in args.unlock:
                if not args.quiet:
                    print('   - Bank SDUs')
                profile.set_max_sdus([bl3save.PSDU_BANK])

            # Skins
            if 'skins' in args.unlock:
                if not args.quiet:
                    print('   - Character Skins')
                profile.unlock_char_skins()

            # Heads
            if 'heads' in args.unlock:
                if not args.quiet:
                    print('   - Character Heads')
                profile.unlock_char_heads()

            # ECHO Themes
            if 'echothemes' in args.unlock:
                if not args.quiet:
                    print('   - ECHO Themes')
                profile.unlock_echo_themes()

            # Emotes
            if 'emotes' in args.unlock:
                if not args.quiet:
                    print('   - Emotes')
                profile.unlock_emotes()

            # Room Decorations
            if 'decos' in args.unlock:
                if not args.quiet:
                    print('   - Room Decorations')
                profile.unlock_room_decos()

            # Weapon Skins
            if 'weaponskins' in args.unlock:
                if not args.quiet:
                    print('   - Weapon Skins')
                profile.unlock_weapon_skins()

            # Weapon Trinkets
            if 'trinkets' in args.unlock:
                if not args.quiet:
                    print('   - Weapon Trinkets')
                profile.unlock_weapon_trinkets()

        # Customization Alphabetization
        if args.alpha:
            if not args.quiet:
                print(' - Alphabetizing Room Decorations, Trinkets, and Weapon Skins')
            profile.alphabetize_cosmetics()

        # Import Items
        if args.import_items:
            cli_common.import_items(args.import_items,
                    profile.create_new_item_encoded,
                    profile.add_bank_item,
                    file_csv=args.csv,
                    allow_fabricator=args.allow_fabricator,
                    quiet=args.quiet,
                    )

        # Setting item levels. Keep in mind that we'll want to do this *after*
        # various of the actions above. If we've been asked to change the level
        # of items, we'll want to do it after the item import.
        if args.item_levels:
            cli_common.update_item_levels(profile.get_bank_items(),
                    args.item_levels,
                    quiet=args.quiet,
                    )

        # Item Mayhem level
        if args.item_mayhem_levels is not None:
            cli_common.update_item_mayhem_levels(profile.get_bank_items(),
                    args.item_mayhem_levels,
                    quiet=args.quiet,
                    )

        # Guardian Rank Alert
        if not args.quiet and guardian_rank_alert:
            print(' - NOTE: Make sure to zero out your savegame Guardian Ranks, if making')
            print('   changes to Guardian Rank in your profile, otherwise the changes might')
            print('   not take effect properly.')

        # Newline at the end of all this.
        if not args.quiet:
            print('')

    # Write out
    if args.output == 'profile':
        profile.save_to(args.output_filename)
        if not args.quiet:
            print('Wrote profile to {}'.format(args.output_filename))
    elif args.output == 'protobuf':
        profile.save_protobuf_to(args.output_filename)
        if not args.quiet:
            print('Wrote protobuf to {}'.format(args.output_filename))
    elif args.output == 'json':
        profile.save_json_to(args.output_filename)
        if not args.quiet:
            print('Wrote JSON to {}'.format(args.output_filename))
    elif args.output == 'items':
        if args.csv:
            cli_common.export_items_csv(
                    profile.get_bank_items(),
                    args.output_filename,
                    quiet=args.quiet,
                    )
        else:
            cli_common.export_items(
                    profile.get_bank_items(),
                    args.output_filename,
                    quiet=args.quiet,
                    )
    else:
        # Not sure how we'd ever get here, given the argparse `choices`
        raise Exception('Invalid output format specified: {}'.format(args.output))
# Script entry point: only run the editor when executed directly.
if __name__ == '__main__':
    main()
| 38.616393 | 124 | 0.598022 |
import os
import sys
import bl3save
import argparse
from . import cli_common
from bl3save.bl3profile import BL3Profile
def main():
parser = argparse.ArgumentParser(
description='Borderlands 3 CLI Profile Editor v{} (PC Only)'.format(bl3save.__version__),
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
epilog="""
The default output type of "profile" will output theoretically-valid
profile which can be loaded into BL3. The output type "protobuf"
will save out the extracted, decrypted protobufs. The output
type "json" will output a JSON-encoded version of the protobufs
in question. The output type "items" will output a text file
containing base64-encoded representations of items in the user's
bank. These can be read back in using the -i/--import-items
option. Note that these are NOT the same as the item strings used
by the BL3 Memory Editor.
"""
)
parser.add_argument('-V', '--version',
action='version',
version='BL3 CLI SaveEdit v{}'.format(bl3save.__version__),
)
parser.add_argument('-o', '--output',
choices=['profile', 'protobuf', 'json', 'items'],
default='profile',
help='Output file format',
)
parser.add_argument('--csv',
action='store_true',
help='When importing or exporting items, use CSV files',
)
parser.add_argument('-f', '--force',
action='store_true',
help='Force output file to overwrite',
)
parser.add_argument('-q', '--quiet',
action='store_true',
help='Supress all non-essential output')
# Now the actual arguments
parser.add_argument('--golden-keys',
dest='golden_keys',
type=int,
help='Number of Golden Keys in the profile',
)
parser.add_argument('--diamond-keys',
dest='diamond_keys',
type=int,
help='Number of Diamond Keys in the profile',
)
parser.add_argument('--vaultcard1-keys',
dest='vaultcard1_keys',
type=int,
help='Number of Vault Card 1 Keys in the profile',
)
parser.add_argument('--vaultcard1-chests',
dest='vaultcard1_chests',
type=int,
help='Number of Vault Card 1 Chests available in the profile',
)
parser.add_argument('--vaultcard2-keys',
dest='vaultcard2_keys',
type=int,
help='Number of Vault Card 2 Keys in the profile',
)
parser.add_argument('--vaultcard2-chests',
dest='vaultcard2_chests',
type=int,
help='Number of Vault Card 2 Chests available in the profile',
)
parser.add_argument('--vaultcard3-keys',
dest='vaultcard3_keys',
type=int,
help='Number of Vault Card 3 Keys in the profile',
)
parser.add_argument('--vaultcard3-chests',
dest='vaultcard3_chests',
type=int,
help='Number of Vault Card 3 Chests available in the profile',
)
# Arguably we could be using a mutually-exclusive group for many of these
# GR options, but I can see some potential value in specifying more than
# one, so I'm not bothering.
parser.add_argument('--zero-guardian-rank',
dest='zero_guardian_rank',
action='store_true',
help='Zero out profile Guardian Rank',
)
parser.add_argument('--min-guardian-rank',
dest='min_guardian_rank',
action='store_true',
help='Set Guardian Rank to minimum required to prevent overwriting by saves',
)
parser.add_argument('--guardian-rank-rewards',
dest='guardian_rank_rewards',
type=int,
help='Set Guardian Rank rewards to the specified number of tokens each',
)
parser.add_argument('--guardian-rank-tokens',
dest='guardian_rank_tokens',
type=int,
help="Number of available Guardian Rank tokens",
)
parser.add_argument('--reset-borderlands-science',
dest='reset_borderlands_science',
action='store_true',
help='Reset Borderlands Science progression',
)
parser.add_argument('--max-borderlands-science',
dest='max_borderlands_science',
action='store_true',
help='Maximize Borderlands Science progression, unlocking True Tannis',
)
parser.add_argument('--remove-borderlands-science-boosts',
dest='remove_borderlands_science_boosts',
action='store_true',
help='Remove the currently active borderlands science boost',
)
parser.add_argument('--borderlands-science-tokens',
dest='borderlands_science_tokens',
type=int,
help="Number of available Borderlands Science tokens",
)
itemlevelgroup = parser.add_mutually_exclusive_group()
itemlevelgroup.add_argument('--item-levels-max',
dest='item_levels_max',
action='store_true',
help='Set all bank items to max level')
itemlevelgroup.add_argument('--item-levels',
dest='item_levels',
type=int,
help='Set all bank items to the specified level')
itemmayhemgroup = parser.add_mutually_exclusive_group()
itemmayhemgroup.add_argument('--item-mayhem-max',
dest='item_mayhem_max',
action='store_true',
help='Set all bank items to the maximum Mayhem level ({})'.format(bl3save.mayhem_max))
itemmayhemgroup.add_argument('--item-mayhem-levels',
dest='item_mayhem_levels',
type=int,
choices=range(bl3save.mayhem_max+1),
help='Set all bank items to the specified Mayhem level (0 to remove)')
parser.add_argument('-i', '--import-items',
dest='import_items',
type=str,
help='Import items from file',
)
parser.add_argument('--allow-fabricator',
dest='allow_fabricator',
action='store_true',
help='Allow importing Fabricator when importing items from file',
)
parser.add_argument('--clear-customizations',
dest='clear_customizations',
action='store_true',
help='Remove all unlocked customizations',
)
parser.add_argument('--alpha',
dest='alpha',
action='store_true',
help='Alphabetize unlocked room decorations, trinkets, and weapon skins',
)
unlock_choices = [
'lostloot', 'bank',
'skins', 'heads',
'echothemes', 'emotes', 'decos',
'weaponskins', 'trinkets',
'customizations',
]
parser.add_argument('--unlock',
action=cli_common.DictAction,
choices=unlock_choices + ['all'],
default={},
help='Game features to unlock',
)
parser.add_argument('input_filename',
help='Input filename',
)
parser.add_argument('output_filename',
help='Output filename',
)
args = parser.parse_args()
if 'all' in args.unlock:
args.unlock = {k: True for k in unlock_choices}
elif 'customizations' in args.unlock:
args.unlock['skins'] = True
args.unlock['heads'] = True
args.unlock['echothemes'] = True
args.unlock['emotes'] = True
args.unlock['decos'] = True
args.unlock['weaponskins'] = True
args.unlock['trinkets'] = True
if args.item_levels_max:
args.item_levels = bl3save.max_level
if args.item_mayhem_max:
args.item_mayhem_levels = bl3save.mayhem_max
if args.golden_keys is not None and args.golden_keys < 0:
raise argparse.ArgumentTypeError('Golden keys cannot be negative')
if args.diamond_keys is not None and args.diamond_keys < 0:
raise argparse.ArgumentTypeError('Diamond keys cannot be negative')
if args.vaultcard1_keys is not None and args.vaultcard1_keys < 0:
raise argparse.ArgumentTypeError('Vault Card 1 keys cannot be negative')
if args.vaultcard1_chests is not None and args.vaultcard1_chests < 0:
raise argparse.ArgumentTypeError('Vault Card 1 chests cannot be negative')
if args.vaultcard2_keys is not None and args.vaultcard2_keys < 0:
raise argparse.ArgumentTypeError('Vault Card 2 keys cannot be negative')
if args.vaultcard2_chests is not None and args.vaultcard2_chests < 0:
raise argparse.ArgumentTypeError('Vault Card 2 chests cannot be negative')
if args.vaultcard3_keys is not None and args.vaultcard3_keys < 0:
raise argparse.ArgumentTypeError('Vault Card 3 keys cannot be negative')
if args.vaultcard3_chests is not None and args.vaultcard3_chests < 0:
raise argparse.ArgumentTypeError('Vault Card 3 chests cannot be negative')
# Check item level. The max storeable in the serial number is 127, but the
# effective limit in-game is 100, thanks to MaxGameStage attributes. We
# could use `bl3save.max_level` here, too, of course, but in the event that
# I don't get this updated in a timely fashion, having it higher would let
if args.item_levels:
if args.item_levels < 1 or args.item_levels > 100:
raise argparse.ArgumentTypeError('Valid item level range is 1 through 100')
if args.item_levels > bl3save.max_level:
print('WARNING: Setting item levels to {}, when {} is the currently-known max'.format(
args.item_levels,
bl3save.max_level,
))
if os.path.exists(args.output_filename) and not args.force:
if args.output_filename == args.input_filename:
confirm_msg = 'Really overwrite {} with specified changes (no backup will be made)'.format(args.output_filename)
else:
confirm_msg = '{} already exists. Overwrite'.format(args.output_filename)
sys.stdout.write('WARNING: {} [y/N]? '.format(confirm_msg))
sys.stdout.flush()
response = sys.stdin.readline().strip().lower()
if len(response) == 0 or response[0] != 'y':
print('Aborting!')
sys.exit(1)
print('')
if not args.quiet:
print('Loading {}'.format(args.input_filename))
profile = BL3Profile(args.input_filename)
if not args.quiet:
print('')
have_changes = any([
args.golden_keys is not None,
args.diamond_keys is not None,
args.vaultcard1_keys is not None,
args.vaultcard1_chests is not None,
args.vaultcard2_keys is not None,
args.vaultcard2_chests is not None,
args.vaultcard3_keys is not None,
args.vaultcard3_chests is not None,
args.zero_guardian_rank,
args.min_guardian_rank,
args.guardian_rank_rewards is not None,
args.guardian_rank_tokens is not None,
args.reset_borderlands_science,
args.max_borderlands_science,
args.remove_borderlands_science_boosts,
args.borderlands_science_tokens is not None,
len(args.unlock) > 0,
args.import_items,
args.item_levels,
args.clear_customizations,
args.alpha,
args.item_mayhem_levels is not None,
])
guardian_rank_alert = False
if have_changes:
if not args.quiet:
print('Making requested changes...')
print('')
if args.golden_keys is not None:
if not args.quiet:
print(' - Setting Golden Key count to {}'.format(args.golden_keys))
profile.set_golden_keys(args.golden_keys)
if args.diamond_keys is not None:
if not args.quiet:
print(' - Setting Diamond Key count to {}'.format(args.diamond_keys))
profile.set_diamond_keys(args.diamond_keys)
if args.vaultcard1_keys is not None:
if not args.quiet:
print(' - Setting Vault Card 1 Key count to {}'.format(args.vaultcard1_keys))
profile.set_vaultcard1_keys(args.vaultcard1_keys)
if args.vaultcard1_chests is not None:
if not args.quiet:
print(' - Setting Vault Card 1 Chest count to {}'.format(args.vaultcard1_chests))
profile.set_vaultcard1_chests(args.vaultcard1_chests)
if args.vaultcard2_keys is not None:
if not args.quiet:
print(' - Setting Vault Card 2 Key count to {}'.format(args.vaultcard2_keys))
profile.set_vaultcard2_keys(args.vaultcard2_keys)
if args.vaultcard2_chests is not None:
if not args.quiet:
print(' - Setting Vault Card 2 Chest count to {}'.format(args.vaultcard2_chests))
profile.set_vaultcard2_chests(args.vaultcard2_chests)
if args.vaultcard3_keys is not None:
if not args.quiet:
print(' - Setting Vault Card 3 Key count to {}'.format(args.vaultcard3_keys))
profile.set_vaultcard3_keys(args.vaultcard3_keys)
if args.vaultcard3_chests is not None:
if not args.quiet:
print(' - Setting Vault Card 3 Chest count to {}'.format(args.vaultcard3_chests))
profile.set_vaultcard3_chests(args.vaultcard3_chests)
if args.zero_guardian_rank:
if not args.quiet:
print(' - Zeroing Guardian Rank')
if not args.min_guardian_rank \
and args.guardian_rank_rewards is None \
and args.guardian_rank_tokens is None:
print(' NOTE: A profile with a zeroed Guardian Rank will probably have its')
print(' Guardian Rank info populated from the first savegame loaded by the game')
profile.zero_guardian_rank()
if args.min_guardian_rank:
if not args.quiet:
print(' - Setting Guardian Rank to minimum (to prevent overwriting by savefiles)')
new_gr = profile.min_guardian_rank()
if new_gr is not None and not args.quiet:
print(' - Guardian Rank set to {}'.format(new_gr))
guardian_rank_alert = True
if args.guardian_rank_rewards is not None:
if not args.quiet:
if args.guardian_rank_rewards == 1:
plural = ''
else:
plural = 's'
print(' - Setting Guardian Rank rewards to {} point{}'.format(args.guardian_rank_rewards, plural))
new_gr = profile.set_guardian_rank_reward_levels(args.guardian_rank_rewards, force=True)
if new_gr is not None and not args.quiet:
print(' - Also set Guardian Rank level to {}'.format(new_gr))
guardian_rank_alert = True
if args.guardian_rank_tokens is not None:
if not args.quiet:
print(' - Setting available Guardian Rank tokens to {}'.format(args.guardian_rank_tokens))
new_gr = profile.set_guardian_rank_tokens(args.guardian_rank_tokens)
if new_gr is not None and not args.quiet:
print(' - Also set Guardian Rank level to {}'.format(new_gr))
guardian_rank_alert = True
if args.reset_borderlands_science:
if not args.quiet:
print(" - Resetting Borderlands Science progression")
profile.reset_borderlands_science()
if args.max_borderlands_science:
if not args.quiet:
print(" - Maximizing Borderlands Science progression")
profile.max_borderlands_science()
if args.remove_borderlands_science_boosts:
if not args.quiet:
print(" - Removing active Borderlands Science boosts")
profile.remove_borderlands_science_boosts()
if args.borderlands_science_tokens is not None:
if not args.quiet:
print(" - Setting available Borderlands Science tokens to {}".format(args.borderlands_science_tokens))
profile.set_borderlands_science_tokens(args.borderlands_science_tokens)
if args.clear_customizations:
if not args.quiet:
print(' - Clearing all customizations')
profile.clear_all_customizations()
if len(args.unlock) > 0:
if not args.quiet:
print(' - Processing Unlocks:')
if 'lostloot' in args.unlock:
if not args.quiet:
print(' - Lost Loot SDUs')
profile.set_max_sdus([bl3save.PSDU_LOSTLOOT])
if 'bank' in args.unlock:
if not args.quiet:
print(' - Bank SDUs')
profile.set_max_sdus([bl3save.PSDU_BANK])
if 'skins' in args.unlock:
if not args.quiet:
print(' - Character Skins')
profile.unlock_char_skins()
if 'heads' in args.unlock:
if not args.quiet:
print(' - Character Heads')
profile.unlock_char_heads()
if 'echothemes' in args.unlock:
if not args.quiet:
print(' - ECHO Themes')
profile.unlock_echo_themes()
if 'emotes' in args.unlock:
if not args.quiet:
print(' - Emotes')
profile.unlock_emotes()
if 'decos' in args.unlock:
if not args.quiet:
print(' - Room Decorations')
profile.unlock_room_decos()
if 'weaponskins' in args.unlock:
if not args.quiet:
print(' - Weapon Skins')
profile.unlock_weapon_skins()
if 'trinkets' in args.unlock:
if not args.quiet:
print(' - Weapon Trinkets')
profile.unlock_weapon_trinkets()
if args.alpha:
if not args.quiet:
print(' - Alphabetizing Room Decorations, Trinkets, and Weapon Skins')
profile.alphabetize_cosmetics()
if args.import_items:
cli_common.import_items(args.import_items,
profile.create_new_item_encoded,
profile.add_bank_item,
file_csv=args.csv,
allow_fabricator=args.allow_fabricator,
quiet=args.quiet,
)
    # If we've been asked to change the level of items, do so now, after the
    # various actions above.
if args.item_levels:
cli_common.update_item_levels(profile.get_bank_items(),
args.item_levels,
quiet=args.quiet,
)
# Item Mayhem level
if args.item_mayhem_levels is not None:
cli_common.update_item_mayhem_levels(profile.get_bank_items(),
args.item_mayhem_levels,
quiet=args.quiet,
)
# Guardian Rank Alert
if not args.quiet and guardian_rank_alert:
print(' - NOTE: Make sure to zero out your savegame Guardian Ranks, if making')
print(' changes to Guardian Rank in your profile, otherwise the changes might')
print(' not take effect properly.')
# Newline at the end of all this.
if not args.quiet:
print('')
# Write out
if args.output == 'profile':
profile.save_to(args.output_filename)
if not args.quiet:
print('Wrote profile to {}'.format(args.output_filename))
elif args.output == 'protobuf':
profile.save_protobuf_to(args.output_filename)
if not args.quiet:
print('Wrote protobuf to {}'.format(args.output_filename))
elif args.output == 'json':
profile.save_json_to(args.output_filename)
if not args.quiet:
print('Wrote JSON to {}'.format(args.output_filename))
elif args.output == 'items':
if args.csv:
cli_common.export_items_csv(
profile.get_bank_items(),
args.output_filename,
quiet=args.quiet,
)
else:
cli_common.export_items(
profile.get_bank_items(),
args.output_filename,
quiet=args.quiet,
)
else:
# Not sure how we'd ever get here
raise Exception('Invalid output format specified: {}'.format(args.output))
# Script entry point: delegate to main() when executed directly.
if __name__ == '__main__':
    main()
| true | true |
1c3822d343c152c3c84298afdba5a208cab24a5a | 62,879 | py | Python | src/pretix/api/serializers/order.py | fakegit/pretix | b6e9e64ff967f7b4f91fe88694f4157d8a0787b4 | [
"Apache-2.0"
] | null | null | null | src/pretix/api/serializers/order.py | fakegit/pretix | b6e9e64ff967f7b4f91fe88694f4157d8a0787b4 | [
"Apache-2.0"
] | 56 | 2020-05-07T07:54:17.000Z | 2021-04-19T12:14:14.000Z | src/pretix/api/serializers/order.py | fakegit/pretix | b6e9e64ff967f7b4f91fe88694f4157d8a0787b4 | [
"Apache-2.0"
] | null | null | null | #
# This file is part of pretix (Community Edition).
#
# Copyright (C) 2014-2020 Raphael Michel and contributors
# Copyright (C) 2020-2021 rami.io GmbH and contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License as published by the Free Software Foundation in version 3 of the License.
#
# ADDITIONAL TERMS APPLY: Pursuant to Section 7 of the GNU Affero General Public License, additional terms are
# applicable granting you additional permissions and placing additional restrictions on your usage of this software.
# Please refer to the pretix LICENSE file to obtain the full terms applicable to this work. If you did not receive
# this file, see <https://pretix.eu/about/en/license>.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along with this program. If not, see
# <https://www.gnu.org/licenses/>.
#
import json
from collections import Counter, defaultdict
from decimal import Decimal
import pycountry
from django.core.files import File
from django.db.models import F, Q
from django.utils.timezone import now
from django.utils.translation import gettext_lazy
from django_countries.fields import Country
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.relations import SlugRelatedField
from rest_framework.reverse import reverse
from pretix.api.serializers.event import SubEventSerializer
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.item import (
InlineItemVariationSerializer, ItemSerializer,
)
from pretix.base.channels import get_all_sales_channels
from pretix.base.decimal import round_decimal
from pretix.base.i18n import language
from pretix.base.models import (
CachedFile, Checkin, Invoice, InvoiceAddress, InvoiceLine, Item,
ItemVariation, Order, OrderPosition, Question, QuestionAnswer, Seat,
SubEvent, TaxRule, Voucher,
)
from pretix.base.models.orders import (
CartPosition, OrderFee, OrderPayment, OrderRefund, RevokedTicketSecret,
)
from pretix.base.pdf import get_images, get_variables
from pretix.base.services.cart import error_messages
from pretix.base.services.locking import NoLockManager
from pretix.base.services.pricing import get_price
from pretix.base.settings import COUNTRIES_WITH_STATE_IN_ADDRESS
from pretix.base.signals import register_ticket_outputs
from pretix.multidomain.urlreverse import build_absolute_uri
class CompatibleCountryField(serializers.Field):
    """Country field that also tolerates legacy rows with a plain-string country.

    Writes produce a ``{field_name: Country}`` dict (``source='*'``); reads
    prefer the structured country and fall back to the old text column.
    """

    def to_internal_value(self, data):
        # source='*' means we return a dict that is merged into validated_data.
        return {self.field_name: Country(data)}

    def to_representation(self, instance: InvoiceAddress):
        if instance.country:
            return str(instance.country)
        # Legacy data may only carry the free-text country attribute.
        return getattr(instance, 'country_old', None)
class CountryField(serializers.Field):
    """Simple country field: ISO code string in, Country object out."""

    def to_internal_value(self, data):
        # source='*' style: hand back a dict for merging into validated_data.
        return {self.field_name: Country(data)}

    def to_representation(self, src):
        if not src:
            return None
        return str(src)
class InvoiceAddressSerializer(I18nAwareModelSerializer):
    """Serializer for an order's invoice address.

    All fields are optional (see ``__init__``); either ``name`` or
    ``name_parts`` may be given, but not both.
    """
    country = CompatibleCountryField(source='*')
    name = serializers.CharField(required=False)

    class Meta:
        model = InvoiceAddress
        fields = ('last_modified', 'is_business', 'company', 'name', 'name_parts', 'street', 'zipcode', 'city', 'country',
                  'state', 'vat_id', 'vat_id_validated', 'internal_reference')
        read_only_fields = ('last_modified',)

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Invoice addresses may be partially filled, so relax every field.
        for v in self.fields.values():
            v.required = False
            v.allow_blank = True

    def validate(self, data):
        # name and name_parts are mutually exclusive representations.
        if data.get('name') and data.get('name_parts'):
            raise ValidationError(
                {'name': ['Do not specify name if you specified name_parts.']}
            )
        # Stamp the event's name scheme onto structured names that lack one.
        if data.get('name_parts') and '_scheme' not in data.get('name_parts'):
            data['name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme

        if data.get('country'):
            # Validate against the ISO 3166-1 alpha-2 registry.
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )

        if data.get('state'):
            # On partial update the country may come from the existing instance.
            # NOTE(review): on create without a country this reads
            # self.instance.country with instance possibly None — confirm callers
            # always pass country together with state.
            cc = str(data.get('country') or self.instance.country or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            # States are validated as ISO 3166-2 subdivisions ("CC-STATE").
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )

        return data
class AnswerQuestionIdentifierField(serializers.Field):
    """Read-only field exposing the identifier of the answered question."""

    def to_representation(self, instance: QuestionAnswer):
        question = instance.question
        return question.identifier
class AnswerQuestionOptionsIdentifierField(serializers.Field):
    """Read-only field listing the identifiers of chosen answer options."""

    def to_representation(self, instance: QuestionAnswer):
        # Unsaved answers have no m2m rows yet; wrapped (simulated) models
        # carry their options in memory, so those are allowed through.
        if not (isinstance(instance, WrappedModel) or instance.pk):
            return []
        return [opt.identifier for opt in instance.options.all()]
class InlineSeatSerializer(I18nAwareModelSerializer):
    """Compact seat representation embedded into order position output."""

    class Meta:
        model = Seat
        fields = ('id', 'name', 'seat_guid')
class AnswerSerializer(I18nAwareModelSerializer):
    """Serializer for question answers on an order position.

    Handles choice options, file uploads referenced as ``file:<uuid>`` from
    the pre-upload API, and type-specific coercion of the raw answer string.
    """
    question_identifier = AnswerQuestionIdentifierField(source='*', read_only=True)
    option_identifiers = AnswerQuestionOptionsIdentifierField(source='*', read_only=True)

    def to_representation(self, instance):
        r = super().to_representation(instance)
        # File answers are stored as "file://<path>" internally; expose a
        # download URL instead of the internal storage path.
        if r['answer'].startswith('file://') and instance.orderposition:
            r['answer'] = reverse('api-v1:orderposition-answer', kwargs={
                'organizer': instance.orderposition.order.event.organizer.slug,
                'event': instance.orderposition.order.event.slug,
                'pk': instance.orderposition.pk,
                'question': instance.question_id,
            }, request=self.context['request'])
        return r

    class Meta:
        model = QuestionAnswer
        fields = ('question', 'answer', 'question_identifier', 'options', 'option_identifiers')

    def validate_question(self, q):
        # Questions are event-scoped; reject cross-event references.
        if q.event != self.context['event']:
            raise ValidationError(
                'The specified question does not belong to this event.'
            )
        return q

    def _handle_file_upload(self, data):
        # "file:keep" means: leave the previously uploaded file untouched.
        if data['answer'] == 'file:keep':
            return data
        try:
            # Files are uploaded beforehand through the upload endpoint; the
            # CachedFile is keyed to the user/token that uploaded it, so one
            # client cannot reference another client's upload.
            ao = self.context["request"].user or self.context["request"].auth
            cf = CachedFile.objects.get(
                session_key=f'api-upload-{str(type(ao))}-{ao.pk}',
                file__isnull=False,
                pk=data['answer'][len("file:"):],
            )
        except (ValidationError, IndexError):  # invalid uuid
            # NOTE(review): fid receives the whole answer dict, not just the
            # file id — confirm whether the message should use data['answer'].
            raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
        except CachedFile.DoesNotExist:
            raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
        # Only common image types and PDF are accepted as answer files.
        allowed_types = (
            'image/png', 'image/jpeg', 'image/gif', 'application/pdf'
        )
        if cf.type not in allowed_types:
            raise ValidationError('The submitted file "{fid}" has a file type that is not allowed in this field.'.format(fid=data))
        # Hard 10 MiB size limit per answer file.
        if cf.file.size > 10 * 1024 * 1024:
            raise ValidationError('The submitted file "{fid}" is too large to be used in this field.'.format(fid=data))

        data['options'] = []
        data['answer'] = cf.file
        return data

    def validate(self, data):
        if data.get('question').type == Question.TYPE_FILE:
            return self._handle_file_upload(data)
        elif data.get('question').type in (Question.TYPE_CHOICE, Question.TYPE_CHOICE_MULTIPLE):
            # Choice questions must be answered through options, never free text.
            if not data.get('options'):
                raise ValidationError(
                    'You need to specify options if the question is of a choice type.'
                )
            if data.get('question').type == Question.TYPE_CHOICE and len(data.get('options')) > 1:
                raise ValidationError(
                    'You can specify at most one option for this question.'
                )
            for o in data.get('options'):
                if o.question_id != data.get('question').pk:
                    raise ValidationError(
                        'The specified option does not belong to this question.'
                    )

            # Store a human-readable rendering of the chosen options.
            data['answer'] = ", ".join([str(o) for o in data.get('options')])

        else:
            if data.get('options'):
                raise ValidationError(
                    'You should not specify options if the question is not of a choice type.'
                )

            # Coerce the raw string into the canonical form for the question type.
            if data.get('question').type == Question.TYPE_BOOLEAN:
                if data.get('answer') in ['true', 'True', '1', 'TRUE']:
                    data['answer'] = 'True'
                elif data.get('answer') in ['false', 'False', '0', 'FALSE']:
                    data['answer'] = 'False'
                else:
                    raise ValidationError(
                        'Please specify "true" or "false" for boolean questions.'
                    )
            elif data.get('question').type == Question.TYPE_NUMBER:
                # Validation only — the answer stays a string.
                serializers.DecimalField(
                    max_digits=50,
                    decimal_places=25
                ).to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_DATE:
                data['answer'] = serializers.DateField().to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_TIME:
                data['answer'] = serializers.TimeField().to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_DATETIME:
                data['answer'] = serializers.DateTimeField().to_internal_value(data.get('answer'))
        return data
class CheckinSerializer(I18nAwareModelSerializer):
    """Serializer for a single check-in record of an order position."""

    class Meta:
        model = Checkin
        fields = ('id', 'datetime', 'list', 'auto_checked_in', 'type')
class OrderDownloadsField(serializers.Field):
    """List of ticket download options (output id + URL) for a whole order."""

    def to_representation(self, instance: Order):
        # Downloads are only offered for paid orders, or for pending orders
        # when the event allows pending downloads and no approval is required.
        if instance.status != Order.STATUS_PAID:
            pending_ok = (
                instance.status == Order.STATUS_PENDING
                and not instance.require_approval
                and instance.event.settings.ticket_download_pending
            )
            if not pending_ok:
                return []

        request = self.context['request']
        urls = []
        for receiver, response in register_ticket_outputs.send(instance.event):
            provider = response(instance.event)
            if not provider.is_enabled:
                continue
            urls.append({
                'output': provider.identifier,
                'url': reverse('api-v1:order-download', kwargs={
                    'organizer': instance.event.organizer.slug,
                    'event': instance.event.slug,
                    'code': instance.code,
                    'output': provider.identifier,
                }, request=request)
            })
        return urls
class PositionDownloadsField(serializers.Field):
    """List of ticket download options for a single order position."""

    def to_representation(self, instance: OrderPosition):
        order = instance.order
        # Same gate as for whole-order downloads: paid, or pending when the
        # event allows it and the order needs no approval.
        if order.status != Order.STATUS_PAID:
            pending_ok = (
                order.status == Order.STATUS_PENDING
                and not order.require_approval
                and order.event.settings.ticket_download_pending
            )
            if not pending_ok:
                return []
        # Some products (e.g. non-admission add-ons) do not generate tickets.
        if not instance.generate_ticket:
            return []

        request = self.context['request']
        urls = []
        for receiver, response in register_ticket_outputs.send(order.event):
            provider = response(order.event)
            if not provider.is_enabled:
                continue
            urls.append({
                'output': provider.identifier,
                'url': reverse('api-v1:orderposition-download', kwargs={
                    'organizer': order.event.organizer.slug,
                    'event': order.event.slug,
                    'pk': instance.pk,
                    'output': provider.identifier,
                }, request=request)
            })
        return urls
class PdfDataSerializer(serializers.Field):
    """Field rendering all PDF-layout variables and images for a position.

    Evaluated in the order's locale; variable and image definitions are
    cached on the serializer context so list serialization stays cheap.
    """

    def to_representation(self, instance: OrderPosition):
        res = {}
        # Date-related variables come from the subevent if there is one.
        ev = instance.subevent or instance.order.event

        with language(instance.order.locale, instance.order.event.settings.region):
            # This needs to have some extra performance improvements to avoid creating hundreds of queries when
            # we serialize a list.
            if 'vars' not in self.context:
                self.context['vars'] = get_variables(self.context['request'].event)
            if 'vars_images' not in self.context:
                self.context['vars_images'] = get_images(self.context['request'].event)

            for k, f in self.context['vars'].items():
                res[k] = f['evaluate'](instance, instance.order, ev)

            # Cache meta data per (sub)event/item object to avoid repeated
            # property evaluation during list serialization.
            if not hasattr(ev, '_cached_meta_data'):
                ev._cached_meta_data = ev.meta_data
            for k, v in ev._cached_meta_data.items():
                res['meta:' + k] = v

            if not hasattr(instance.item, '_cached_meta_data'):
                instance.item._cached_meta_data = instance.item.meta_data
            for k, v in instance.item._cached_meta_data.items():
                res['itemmeta:' + k] = v

            res['images'] = {}
            for k, f in self.context['vars_images'].items():
                if 'etag' in f:
                    # etag doubles as the existence check when provided.
                    has_image = etag = f['etag'](instance, instance.order, ev)
                else:
                    has_image = f['etag'](instance, instance.order, ev)
                    etag = None
                if has_image:
                    url = reverse('api-v1:orderposition-pdf_image', kwargs={
                        'organizer': instance.order.event.organizer.slug,
                        'event': instance.order.event.slug,
                        'pk': instance.pk,
                        'key': k,
                    }, request=self.context['request'])
                    if etag:
                        # Fragment-only etag lets clients cache-bust safely.
                        url += f'#etag={etag}'
                    res['images'][k] = url
                else:
                    res['images'][k] = None

        return res
class OrderPositionSerializer(I18nAwareModelSerializer):
    """Read/update serializer for a single order position.

    Only attendee-related fields and answers are writable; price, product
    and ticket data are read-only here.
    """
    checkins = CheckinSerializer(many=True, read_only=True)
    answers = AnswerSerializer(many=True)
    downloads = PositionDownloadsField(source='*', read_only=True)
    order = serializers.SlugRelatedField(slug_field='code', read_only=True)
    pdf_data = PdfDataSerializer(source='*', read_only=True)
    seat = InlineSeatSerializer(read_only=True)
    country = CompatibleCountryField(source='*')
    attendee_name = serializers.CharField(required=False)

    class Meta:
        model = OrderPosition
        fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'attendee_email', 'voucher', 'tax_rate', 'tax_value', 'secret', 'addon_to', 'subevent', 'checkins',
                  'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data', 'seat', 'canceled')
        read_only_fields = (
            'id', 'order', 'positionid', 'item', 'variation', 'price', 'voucher', 'tax_rate', 'tax_value', 'secret',
            'addon_to', 'subevent', 'checkins', 'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data',
            'seat', 'canceled'
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        request = self.context.get('request')
        # pdf_data is expensive and permission-gated: only serialize it when
        # explicitly requested and the caller may view orders.
        if request and (not request.query_params.get('pdf_data', 'false') == 'true' or 'can_view_orders' not in request.eventpermset):
            self.fields.pop('pdf_data', None)

    def validate(self, data):
        # attendee_name and attendee_name_parts are mutually exclusive.
        if data.get('attendee_name') and data.get('attendee_name_parts'):
            raise ValidationError(
                {'attendee_name': ['Do not specify attendee_name if you specified attendee_name_parts.']}
            )
        # Stamp the event's name scheme onto structured names lacking one.
        if data.get('attendee_name_parts') and '_scheme' not in data.get('attendee_name_parts'):
            data['attendee_name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme

        if data.get('country'):
            # ISO 3166-1 alpha-2 validation.
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )

        if data.get('state'):
            cc = str(data.get('country') or self.instance.country or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            # ISO 3166-2 subdivision validation ("CC-STATE").
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )

        return data

    def update(self, instance, validated_data):
        # Even though all fields that shouldn't be edited are marked as read_only in the serializer
        # (hopefully), we'll be extra careful here and be explicit about the model fields we update.
        update_fields = [
            'attendee_name_parts', 'company', 'street', 'zipcode', 'city', 'country',
            'state', 'attendee_email',
        ]
        answers_data = validated_data.pop('answers', None)

        # A plain attendee_name is stored as a legacy name_parts structure.
        name = validated_data.pop('attendee_name', '')
        if name and not validated_data.get('attendee_name_parts'):
            validated_data['attendee_name_parts'] = {
                '_legacy': name
            }

        for attr, value in validated_data.items():
            if attr in update_fields:
                setattr(instance, attr, value)

        instance.save(update_fields=update_fields)

        if answers_data is not None:
            qs_seen = set()
            # Existing answers keyed by question, so we can update in place.
            answercache = {
                a.question_id: a for a in instance.answers.all()
            }
            for answ_data in answers_data:
                options = answ_data.pop('options', [])

                if answ_data['question'].pk in qs_seen:
                    raise ValidationError(f'Question {answ_data["question"]} was sent twice.')

                if answ_data['question'].pk in answercache:
                    # Update an existing answer.
                    # NOTE(review): `options` is popped but never re-applied on
                    # this branch, so choice selections of an existing answer
                    # are not updated here — confirm whether that is intended.
                    a = answercache[answ_data['question'].pk]
                    if isinstance(answ_data['answer'], File):
                        a.file.save(answ_data['answer'].name, answ_data['answer'], save=False)
                        a.answer = 'file://' + a.file.name
                    elif a.answer.startswith('file://') and answ_data['answer'] == "file:keep":
                        pass  # keep current file
                    else:
                        for attr, value in answ_data.items():
                            setattr(a, attr, value)
                    a.save()
                else:
                    # Create a new answer; file answers need a two-step save
                    # so the stored answer can reference the final file name.
                    if isinstance(answ_data['answer'], File):
                        an = answ_data.pop('answer')
                        a = instance.answers.create(**answ_data, answer='')
                        a.file.save(an.name, an, save=False)
                        a.answer = 'file://' + a.file.name
                        a.save()
                    else:
                        a = instance.answers.create(**answ_data)
                    a.options.set(options)
                qs_seen.add(a.question_id)

            # Answers not re-submitted are removed.
            for qid, a in answercache.items():
                if qid not in qs_seen:
                    a.delete()

        return instance
class RequireAttentionField(serializers.Field):
    """True if either the order or the product flags check-in attention."""

    def to_representation(self, instance: OrderPosition):
        if instance.order.checkin_attention:
            return True
        return instance.item.checkin_attention
class AttendeeNameField(serializers.Field):
    """Attendee name with fallback to the base product, then the invoice address."""

    def to_representation(self, instance: OrderPosition):
        name = instance.attendee_name
        # Add-ons without their own name inherit from the base position.
        if not name and instance.addon_to_id:
            name = instance.addon_to.attendee_name
        if name:
            return name
        # Last resort: the name from the order's invoice address, if any.
        try:
            return instance.order.invoice_address.name
        except InvoiceAddress.DoesNotExist:
            return name
class AttendeeNamePartsField(serializers.Field):
    """Structured attendee name parts, with the same fallback chain as the name."""

    def to_representation(self, instance: OrderPosition):
        name = instance.attendee_name
        parts = instance.attendee_name_parts
        # Add-ons without their own name inherit from the base position.
        if not name and instance.addon_to_id:
            name = instance.addon_to.attendee_name
            parts = instance.addon_to.attendee_name_parts
        if not name:
            # Last resort: structured name from the invoice address, if any.
            try:
                parts = instance.order.invoice_address.name_parts
            except InvoiceAddress.DoesNotExist:
                pass
        return parts
class CheckinListOrderPositionSerializer(OrderPositionSerializer):
    """Order position serializer used by check-in list endpoints.

    Attendee names fall back to add-on/invoice data, and related objects can
    be expanded inline via the ``expand`` query parameter.
    """
    require_attention = RequireAttentionField(source='*')
    attendee_name = AttendeeNameField(source='*')
    attendee_name_parts = AttendeeNamePartsField(source='*')
    order__status = serializers.SlugRelatedField(read_only=True, slug_field='status', source='order')

    class Meta:
        model = OrderPosition
        fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'attendee_email', 'voucher', 'tax_rate', 'tax_value', 'secret', 'addon_to', 'subevent', 'checkins',
                  'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data', 'seat', 'require_attention',
                  'order__status')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Replace PK fields with nested serializers for every relation the
        # client asked to expand.
        expand = self.context['request'].query_params.getlist('expand')
        expandable = {
            'subevent': lambda: SubEventSerializer(read_only=True),
            'item': lambda: ItemSerializer(read_only=True),
            'variation': lambda: InlineItemVariationSerializer(read_only=True),
        }
        for key, factory in expandable.items():
            if key in expand:
                self.fields[key] = factory()
class OrderPaymentTypeField(serializers.Field):
    """Legacy field: provider identifier of the order's last payment."""
    # TODO: Remove after pretix 2.2

    def to_representation(self, instance: Order):
        # Iterate (instead of slicing) so a prefetched queryset is reused.
        provider = None
        for payment in instance.payments.all():
            provider = payment.provider
        return provider
class OrderPaymentDateField(serializers.DateField):
    """Legacy field: date of the order's last payment that has a payment date."""
    # TODO: Remove after pretix 2.2

    def to_representation(self, instance: Order):
        # Keep the last non-empty payment_date; iterate so a prefetched
        # queryset is reused.
        last_date = None
        for payment in instance.payments.all():
            if payment.payment_date:
                last_date = payment.payment_date
        if last_date:
            return super().to_representation(last_date.date())
class OrderFeeSerializer(I18nAwareModelSerializer):
    """Read-only serializer for fees (payment, shipping, …) on an order."""

    class Meta:
        model = OrderFee
        fields = ('fee_type', 'value', 'description', 'internal_type', 'tax_rate', 'tax_value', 'tax_rule', 'canceled')
class PaymentURLField(serializers.URLField):
    """Presale payment URL for a payment, or None once it left the created state."""

    def to_representation(self, instance: OrderPayment):
        if instance.state != OrderPayment.PAYMENT_STATE_CREATED:
            return None
        url_kwargs = {
            'order': instance.order.code,
            'secret': instance.order.secret,
            'payment': instance.pk,
        }
        return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs=url_kwargs)
class PaymentDetailsField(serializers.Field):
    """Provider-specific payment details, or an empty dict if the provider is gone."""

    def to_representation(self, value: OrderPayment):
        provider = value.payment_provider
        return provider.api_payment_details(value) if provider else {}
class OrderPaymentSerializer(I18nAwareModelSerializer):
    """Read-only serializer for a payment attempt on an order."""
    payment_url = PaymentURLField(source='*', allow_null=True, read_only=True)
    details = PaymentDetailsField(source='*', allow_null=True, read_only=True)

    class Meta:
        model = OrderPayment
        fields = ('local_id', 'state', 'amount', 'created', 'payment_date', 'provider', 'payment_url',
                  'details')
class OrderRefundSerializer(I18nAwareModelSerializer):
    """Read-only serializer for a refund on an order."""
    # Refunds reference the payment they refund by its order-local id.
    payment = SlugRelatedField(slug_field='local_id', read_only=True)

    class Meta:
        model = OrderRefund
        fields = ('local_id', 'state', 'source', 'amount', 'payment', 'created', 'execution_date', 'comment', 'provider')
class OrderURLField(serializers.URLField):
    """Absolute presale URL of the order's status page."""

    def to_representation(self, instance: Order):
        url_kwargs = {
            'order': instance.code,
            'secret': instance.secret,
        }
        return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs=url_kwargs)
class OrderSerializer(I18nAwareModelSerializer):
    """Read/update serializer for orders, including nested positions, fees,
    payments, refunds and the invoice address.

    Only comment, checkin_attention, email, phone, locale and the invoice
    address are writable; everything else is read-only.
    """
    invoice_address = InvoiceAddressSerializer(allow_null=True)
    positions = OrderPositionSerializer(many=True, read_only=True)
    fees = OrderFeeSerializer(many=True, read_only=True)
    downloads = OrderDownloadsField(source='*', read_only=True)
    payments = OrderPaymentSerializer(many=True, read_only=True)
    refunds = OrderRefundSerializer(many=True, read_only=True)
    payment_date = OrderPaymentDateField(source='*', read_only=True)
    payment_provider = OrderPaymentTypeField(source='*', read_only=True)
    url = OrderURLField(source='*', read_only=True)

    class Meta:
        model = Order
        fields = (
            'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
            'payment_provider', 'fees', 'total', 'comment', 'invoice_address', 'positions', 'downloads',
            'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
            'url'
        )
        read_only_fields = (
            'code', 'status', 'testmode', 'secret', 'datetime', 'expires', 'payment_date',
            'payment_provider', 'fees', 'total', 'positions', 'downloads',
            'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel'
        )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # pdf_data is expensive to compute, so it is opt-in via ?pdf_data=true.
        if not self.context['request'].query_params.get('pdf_data', 'false') == 'true':
            self.fields['positions'].child.fields.pop('pdf_data', None)

        # Allow clients to slim down the response via ?exclude=field or
        # ?exclude=field.subfield.
        for exclude_field in self.context['request'].query_params.getlist('exclude'):
            p = exclude_field.split('.')
            if p[0] in self.fields:
                if len(p) == 1:
                    del self.fields[p[0]]
                elif len(p) == 2:
                    # Nested many=True serializers keep their fields on .child;
                    # plain nested serializers keep them directly. Use
                    # .pop(..., None) so an unknown sub-field name in this
                    # user-supplied parameter cannot raise (i.e. cause a 500).
                    target = getattr(self.fields[p[0]], 'child', self.fields[p[0]])
                    target.fields.pop(p[1], None)

    def validate_locale(self, l):
        # Only locales enabled for this event may be set on an order.
        if l not in self.instance.event.settings.locales:
            raise ValidationError('"{}" is not a supported locale for this event.'.format(l))
        return l

    def update(self, instance, validated_data):
        # Even though all fields that shouldn't be edited are marked as read_only in the serializer
        # (hopefully), we'll be extra careful here and be explicit about the model fields we update.
        update_fields = ['comment', 'checkin_attention', 'email', 'locale', 'phone']

        if 'invoice_address' in validated_data:
            iadata = validated_data.pop('invoice_address')

            if not iadata:
                # null/empty payload removes the invoice address entirely.
                try:
                    instance.invoice_address.delete()
                except InvoiceAddress.DoesNotExist:
                    pass
            else:
                # A plain name is stored as a legacy name_parts structure.
                name = iadata.pop('name', '')
                if name and not iadata.get('name_parts'):
                    iadata['name_parts'] = {
                        '_legacy': name
                    }
                try:
                    ia = instance.invoice_address
                    # Changing the VAT ID invalidates a previous validation
                    # unless the caller explicitly sets the validated flag.
                    if iadata.get('vat_id') != ia.vat_id and 'vat_id_validated' not in iadata:
                        ia.vat_id_validated = False
                    self.fields['invoice_address'].update(ia, iadata)
                except InvoiceAddress.DoesNotExist:
                    InvoiceAddress.objects.create(order=instance, **iadata)

        for attr, value in validated_data.items():
            if attr in update_fields:
                setattr(instance, attr, value)

        instance.save(update_fields=update_fields)
        return instance
class AnswerQuestionOptionsField(serializers.Field):
    """Read-only field listing the primary keys of chosen answer options."""

    def to_representation(self, instance: QuestionAnswer):
        return [opt.pk for opt in instance.options.all()]
class SimulatedAnswerSerializer(AnswerSerializer):
    """Answer serializer for simulated (not persisted) orders."""
    # Simulated answers are unsaved, so options are read straight off the
    # wrapped object instead of a PK-related m2m field.
    options = AnswerQuestionOptionsField(read_only=True, source='*')
class SimulatedOrderPositionSerializer(OrderPositionSerializer):
    """Order position serializer for simulated (not persisted) orders."""
    answers = SimulatedAnswerSerializer(many=True)
    # Unsaved positions have no PK, so add-on references use positionid.
    addon_to = serializers.SlugRelatedField(read_only=True, slug_field='positionid')
class SimulatedOrderSerializer(OrderSerializer):
    """Order serializer for simulated (not persisted) orders."""
    positions = SimulatedOrderPositionSerializer(many=True, read_only=True)
class PriceCalcSerializer(serializers.Serializer):
    """Input serializer for the price calculation endpoint.

    All relation fields are scoped to the event passed at construction time;
    the subevent field only exists for event series.
    """
    item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
    variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
    subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
    tax_rule = serializers.PrimaryKeyRelatedField(queryset=TaxRule.objects.none(), required=False, allow_null=True)
    locale = serializers.CharField(allow_null=True, required=False)

    def __init__(self, *args, **kwargs):
        event = kwargs.pop('event')
        super().__init__(*args, **kwargs)
        # Narrow the placeholder querysets to objects of this event.
        self.fields['item'].queryset = event.items.all()
        self.fields['tax_rule'].queryset = event.tax_rules.all()
        self.fields['variation'].queryset = ItemVariation.objects.filter(item__event=event)
        if not event.has_subevents:
            # Non-series events must not accept a subevent at all.
            del self.fields['subevent']
        else:
            self.fields['subevent'].queryset = event.subevents.all()
class AnswerCreateSerializer(AnswerSerializer):
    """Answer serializer used when creating orders through the API."""
    pass
class OrderFeeCreateSerializer(I18nAwareModelSerializer):
    """Writable fee serializer for order creation.

    The underscore-prefixed flags are API-only modifiers consumed by the
    order creation logic, not model fields.
    """
    _treat_value_as_percentage = serializers.BooleanField(default=False, required=False)
    _split_taxes_like_products = serializers.BooleanField(default=False, required=False)

    class Meta:
        model = OrderFee
        fields = ('fee_type', 'value', 'description', 'internal_type', 'tax_rule',
                  '_treat_value_as_percentage', '_split_taxes_like_products')

    def validate_tax_rule(self, tr):
        # Tax rules are event-scoped; reject cross-event references.
        if tr is not None and tr.event != self.context['event']:
            raise ValidationError(
                'The specified tax rate does not belong to this event.'
            )
        return tr
class OrderPositionCreateSerializer(I18nAwareModelSerializer):
    """Writable position serializer for order creation.

    Accepts product/variation, attendee data, answers, seat and voucher;
    prices may be omitted to use the computed price.
    """
    answers = AnswerCreateSerializer(many=True, required=False)
    # References the positionid of another position in the same request.
    addon_to = serializers.IntegerField(required=False, allow_null=True)
    secret = serializers.CharField(required=False)
    attendee_name = serializers.CharField(required=False, allow_null=True)
    # Seat is given by its string seat_guid, resolved later during creation.
    seat = serializers.CharField(required=False, allow_null=True)
    price = serializers.DecimalField(required=False, allow_null=True, decimal_places=2,
                                     max_digits=10)
    voucher = serializers.SlugRelatedField(slug_field='code', queryset=Voucher.objects.none(),
                                           required=False, allow_null=True)
    country = CompatibleCountryField(source='*')

    class Meta:
        model = OrderPosition
        fields = ('positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts', 'attendee_email',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'secret', 'addon_to', 'subevent', 'answers', 'seat', 'voucher')

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Address fields are always optional on creation.
        for k, v in self.fields.items():
            if k in ('company', 'street', 'zipcode', 'city', 'country', 'state'):
                v.required = False
                v.allow_blank = True
                v.allow_null = True

    def validate_secret(self, secret):
        # Ticket secrets must be unique per event, including canceled positions.
        if secret and OrderPosition.all.filter(order__event=self.context['event'], secret=secret).exists():
            raise ValidationError(
                'You cannot assign a position secret that already exists.'
            )
        return secret

    def validate_item(self, item):
        if item.event != self.context['event']:
            raise ValidationError(
                'The specified item does not belong to this event.'
            )
        if not item.active:
            raise ValidationError(
                'The specified item is not active.'
            )
        return item

    def validate_subevent(self, subevent):
        # Event series require a subevent; plain events forbid one.
        if self.context['event'].has_subevents:
            if not subevent:
                raise ValidationError(
                    'You need to set a subevent.'
                )
            if subevent.event != self.context['event']:
                raise ValidationError(
                    'The specified subevent does not belong to this event.'
                )
        elif subevent:
            raise ValidationError(
                'You cannot set a subevent for this event.'
            )
        return subevent

    def validate(self, data):
        # Variation must match the item: required for items with variations,
        # forbidden otherwise.
        if data.get('item'):
            if data.get('item').has_variations:
                if not data.get('variation'):
                    raise ValidationError({'variation': ['You should specify a variation for this item.']})
                else:
                    if data.get('variation').item != data.get('item'):
                        raise ValidationError(
                            {'variation': ['The specified variation does not belong to the specified item.']}
                        )
            elif data.get('variation'):
                raise ValidationError(
                    {'variation': ['You cannot specify a variation for this item.']}
                )
        # attendee_name and attendee_name_parts are mutually exclusive.
        if data.get('attendee_name') and data.get('attendee_name_parts'):
            raise ValidationError(
                {'attendee_name': ['Do not specify attendee_name if you specified attendee_name_parts.']}
            )
        # Stamp the event's name scheme onto structured names lacking one.
        if data.get('attendee_name_parts') and '_scheme' not in data.get('attendee_name_parts'):
            data['attendee_name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme

        if data.get('country'):
            # ISO 3166-1 alpha-2 validation.
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )

        if data.get('state'):
            # NOTE(review): on create self.instance is None, so a state without
            # a country would raise AttributeError here — confirm intended.
            cc = str(data.get('country') or self.instance.country or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            # ISO 3166-2 subdivision validation ("CC-STATE").
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )

        return data
class CompatibleJSONField(serializers.JSONField):
    """JSON field that keeps its internal value as a JSON-encoded string."""
    def to_internal_value(self, data):
        # Store the submitted structure serialized as a string.
        try:
            encoded = json.dumps(data)
        except (TypeError, ValueError):
            self.fail('invalid')
        else:
            return encoded
    def to_representation(self, value):
        # Decode back to a native structure; pass falsy values through untouched.
        return json.loads(value) if value else value
class WrappedList:
    """Duck-types a Django manager/queryset: ``all()`` returns a plain list."""
    def __init__(self, data):
        self._items = data
    def all(self):
        # Mirror the queryset API expected by consumers of this object.
        return self._items
class WrappedModel:
    """Read-only proxy around a model instance.

    Attribute reads are forwarded to the wrapped object (exposed as
    ``_wrapped``), while any attempt to persist or delete it raises, so
    simulated objects can never hit the database.
    """
    def __init__(self, model):
        self._wrapped = model
    def __getattr__(self, name):
        # Delegate unknown attribute lookups to the wrapped instance.
        return getattr(self._wrapped, name)
    def save(self, *args, **kwargs):
        raise NotImplementedError
    def delete(self, *args, **kwargs):
        raise NotImplementedError
class OrderCreateSerializer(I18nAwareModelSerializer):
    """Writable serializer that creates a complete order through the API.

    Besides the model fields it supports several write-only options:
    ``consume_carts`` (cart IDs whose reservations are released and reused),
    ``force`` (skip quota checks), ``simulate`` (compute the order without
    persisting anything), ``send_email`` and manual payment information.
    """
    invoice_address = InvoiceAddressSerializer(required=False)
    positions = OrderPositionCreateSerializer(many=True, required=True)
    fees = OrderFeeCreateSerializer(many=True, required=False)
    status = serializers.ChoiceField(choices=(
        ('n', Order.STATUS_PENDING),
        ('p', Order.STATUS_PAID),
    ), default='n', required=False)
    code = serializers.CharField(
        required=False,
        max_length=16,
        min_length=5
    )
    comment = serializers.CharField(required=False, allow_blank=True)
    payment_provider = serializers.CharField(required=False, allow_null=True)
    payment_info = CompatibleJSONField(required=False)
    consume_carts = serializers.ListField(child=serializers.CharField(), required=False)
    force = serializers.BooleanField(default=False, required=False)
    payment_date = serializers.DateTimeField(required=False, allow_null=True)
    send_email = serializers.BooleanField(default=False, required=False)
    simulate = serializers.BooleanField(default=False, required=False)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Restrict voucher lookups to vouchers of the current event.
        self.fields['positions'].child.fields['voucher'].queryset = self.context['event'].vouchers.all()
    class Meta:
        model = Order
        fields = ('code', 'status', 'testmode', 'email', 'phone', 'locale', 'payment_provider', 'fees', 'comment', 'sales_channel',
                  'invoice_address', 'positions', 'checkin_attention', 'payment_info', 'payment_date', 'consume_carts',
                  'force', 'send_email', 'simulate')
    def validate_payment_provider(self, pp):
        # None is allowed here; create() later enforces a provider for paid orders.
        if pp is None:
            return None
        if pp not in self.context['event'].get_payment_providers():
            raise ValidationError('The given payment provider is not known.')
        return pp
    def validate_sales_channel(self, channel):
        if channel not in get_all_sales_channels():
            raise ValidationError('Unknown sales channel.')
        return channel
    def validate_code(self, code):
        # Order codes are unique per organizer and limited to an unambiguous alphabet.
        if code and Order.objects.filter(event__organizer=self.context['event'].organizer, code=code).exists():
            raise ValidationError(
                'This order code is already in use.'
            )
        if any(c not in 'ABCDEFGHJKLMNPQRSTUVWXYZ1234567890' for c in code):
            raise ValidationError(
                'This order code contains invalid characters.'
            )
        return code
    def validate_positions(self, data):
        """Validate position IDs: either all set manually, or none at all."""
        if not data:
            raise ValidationError(
                'An order cannot be empty.'
            )
        errs = [{} for p in data]
        if any([p.get('positionid') for p in data]):
            # Manual IDs must be complete and consecutive, and add-ons must
            # directly follow their parent position.
            if not all([p.get('positionid') for p in data]):
                for i, p in enumerate(data):
                    if not p.get('positionid'):
                        errs[i]['positionid'] = [
                            'If you set position IDs manually, you need to do so for all positions.'
                        ]
                raise ValidationError(errs)
            last_non_add_on = None
            last_posid = 0
            for i, p in enumerate(data):
                if p['positionid'] != last_posid + 1:
                    errs[i]['positionid'] = [
                        'Position IDs need to be consecutive.'
                    ]
                if p.get('addon_to') and p['addon_to'] != last_non_add_on:
                    errs[i]['addon_to'] = [
                        "If you set addon_to, you need to make sure that the referenced "
                        "position ID exists and is transmitted directly before its add-ons."
                    ]
                if not p.get('addon_to'):
                    last_non_add_on = p['positionid']
                last_posid = p['positionid']
        elif any([p.get('addon_to') for p in data]):
            errs = [
                {'positionid': ["If you set addon_to on any position, you need to specify position IDs manually."]}
                for p in data
            ]
        else:
            # Auto-number the positions if the client did not.
            for i, p in enumerate(data):
                p['positionid'] = i + 1
        if any(errs):
            raise ValidationError(errs)
        return data
    def validate_testmode(self, testmode):
        if 'sales_channel' in self.initial_data:
            try:
                sales_channel = get_all_sales_channels()[self.initial_data['sales_channel']]
                if testmode and not sales_channel.testmode_supported:
                    raise ValidationError('This sales channel does not provide support for test mode.')
            except KeyError:
                # We do not need to raise a ValidationError here, since there is another check to validate the
                # sales_channel
                pass
        return testmode
    def create(self, validated_data):
        """Create (or, with ``simulate``, only compute) the order.

        Runs under the event lock and validates vouchers, seats and quotas
        before any database write happens.
        """
        fees_data = validated_data.pop('fees') if 'fees' in validated_data else []
        positions_data = validated_data.pop('positions') if 'positions' in validated_data else []
        payment_provider = validated_data.pop('payment_provider', None)
        payment_info = validated_data.pop('payment_info', '{}')
        payment_date = validated_data.pop('payment_date', now())
        force = validated_data.pop('force', False)
        simulate = validated_data.pop('simulate', False)
        self._send_mail = validated_data.pop('send_email', False)
        if 'invoice_address' in validated_data:
            iadata = validated_data.pop('invoice_address')
            name = iadata.pop('name', '')
            # Legacy single-string names are stored as the "_legacy" name part.
            if name and not iadata.get('name_parts'):
                iadata['name_parts'] = {
                    '_legacy': name
                }
            ia = InvoiceAddress(**iadata)
        else:
            ia = None
        # Simulated orders do not need a real lock.
        lockfn = self.context['event'].lock
        if simulate:
            lockfn = NoLockManager
        with lockfn() as now_dt:
            free_seats = set()
            seats_seen = set()
            consume_carts = validated_data.pop('consume_carts', [])
            delete_cps = []
            quota_avail_cache = {}
            v_budget = {}
            voucher_usage = Counter()
            if consume_carts:
                # Give quota, voucher redemptions and seats held by the given
                # carts back, so this order can re-use them.
                for cp in CartPosition.objects.filter(
                    event=self.context['event'], cart_id__in=consume_carts, expires__gt=now()
                ):
                    quotas = (cp.variation.quotas.filter(subevent=cp.subevent)
                              if cp.variation else cp.item.quotas.filter(subevent=cp.subevent))
                    for quota in quotas:
                        if quota not in quota_avail_cache:
                            quota_avail_cache[quota] = list(quota.availability())
                        if quota_avail_cache[quota][1] is not None:
                            quota_avail_cache[quota][1] += 1
                    if cp.voucher:
                        voucher_usage[cp.voucher] -= 1
                    if cp.expires > now_dt:
                        if cp.seat:
                            free_seats.add(cp.seat)
                    delete_cps.append(cp)
            errs = [{} for p in positions_data]
            # Per-position voucher and seat validation.
            for i, pos_data in enumerate(positions_data):
                if pos_data.get('voucher'):
                    v = pos_data['voucher']
                    if pos_data.get('addon_to'):
                        errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.']
                        continue
                    if not v.applies_to(pos_data['item'], pos_data.get('variation')):
                        errs[i]['voucher'] = [error_messages['voucher_invalid_item']]
                        continue
                    if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id:
                        errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']]
                        continue
                    if v.valid_until is not None and v.valid_until < now_dt:
                        errs[i]['voucher'] = [error_messages['voucher_expired']]
                        continue
                    voucher_usage[v] += 1
                    if voucher_usage[v] > 0:
                        # Check remaining redemptions, taking other live carts into account.
                        redeemed_in_carts = CartPosition.objects.filter(
                            Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt)
                        ).exclude(pk__in=[cp.pk for cp in delete_cps])
                        v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count()
                        if v_avail < voucher_usage[v]:
                            errs[i]['voucher'] = [
                                'The voucher has already been used the maximum number of times.'
                            ]
                    if v.budget is not None:
                        # Make sure the discount granted by this voucher still
                        # fits into its remaining budget, shrinking it if possible.
                        price = pos_data.get('price')
                        if price is None:
                            price = get_price(
                                item=pos_data.get('item'),
                                variation=pos_data.get('variation'),
                                voucher=v,
                                custom_price=None,
                                subevent=pos_data.get('subevent'),
                                addon_to=pos_data.get('addon_to'),
                                invoice_address=ia,
                            ).gross
                        pbv = get_price(
                            item=pos_data['item'],
                            variation=pos_data.get('variation'),
                            voucher=None,
                            custom_price=None,
                            subevent=pos_data.get('subevent'),
                            addon_to=pos_data.get('addon_to'),
                            invoice_address=ia,
                        )
                        if v not in v_budget:
                            v_budget[v] = v.budget - v.budget_used()
                        disc = pbv.gross - price
                        if disc > v_budget[v]:
                            new_disc = v_budget[v]
                            v_budget[v] -= new_disc
                            if new_disc == Decimal('0.00') or pos_data.get('price') is not None:
                                errs[i]['voucher'] = [
                                    'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
                                    'given.'.format(v_budget[v] + new_disc, disc)
                                ]
                                continue
                            pos_data['price'] = price + (disc - new_disc)
                        else:
                            v_budget[v] -= disc
                seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
                if pos_data.get('seat'):
                    if not seated:
                        errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
                    try:
                        seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
                    except Seat.DoesNotExist:
                        errs[i]['seat'] = ['The specified seat does not exist.']
                    else:
                        pos_data['seat'] = seat
                        if (seat not in free_seats and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat in seats_seen:
                            errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
                        seats_seen.add(seat)
                elif seated:
                    errs[i]['seat'] = ['The specified product requires to choose a seat.']
            if not force:
                # Quota validation (skipped for quota-exempt/blocking vouchers).
                for i, pos_data in enumerate(positions_data):
                    if pos_data.get('voucher'):
                        if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
                            continue
                    if pos_data.get('subevent'):
                        if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
                            errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
                                str(pos_data.get('item'))
                            )]
                        if (
                            pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
                            pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
                        ):
                            errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
                                str(pos_data.get('item'))
                            )]
                    new_quotas = (pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent'))
                                  if pos_data.get('variation')
                                  else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent')))
                    if len(new_quotas) == 0:
                        errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
                            str(pos_data.get('item'))
                        )]
                    else:
                        for quota in new_quotas:
                            if quota not in quota_avail_cache:
                                quota_avail_cache[quota] = list(quota.availability())
                            if quota_avail_cache[quota][1] is not None:
                                quota_avail_cache[quota][1] -= 1
                                if quota_avail_cache[quota][1] < 0:
                                    errs[i]['item'] = [
                                        gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
                                            quota.name
                                        )
                                    ]
            if any(errs):
                raise ValidationError({'positions': errs})
            if validated_data.get('locale', None) is None:
                validated_data['locale'] = self.context['event'].settings.locale
            order = Order(event=self.context['event'], **validated_data)
            order.set_expires(subevents=[p.get('subevent') for p in positions_data])
            order.meta_info = "{}"
            order.total = Decimal('0.00')
            if simulate:
                # Wrap the unsaved order so any accidental .save() raises.
                order = WrappedModel(order)
                order.last_modified = now()
                order.code = 'PREVIEW'
            else:
                order.save()
            if ia:
                if not simulate:
                    ia.order = order
                    ia.save()
                else:
                    order.invoice_address = ia
                    ia.last_modified = now()
            pos_map = {}
            for pos_data in positions_data:
                answers_data = pos_data.pop('answers', [])
                addon_to = pos_data.pop('addon_to', None)
                attendee_name = pos_data.pop('attendee_name', '')
                if attendee_name and not pos_data.get('attendee_name_parts'):
                    pos_data['attendee_name_parts'] = {
                        '_legacy': attendee_name
                    }
                pos = OrderPosition(**pos_data)
                if simulate:
                    pos.order = order._wrapped
                else:
                    pos.order = order
                if addon_to:
                    if simulate:
                        pos.addon_to = pos_map[addon_to]._wrapped
                    else:
                        pos.addon_to = pos_map[addon_to]
                if pos.price is None:
                    # No explicit price given: compute it from item/voucher/subevent.
                    price = get_price(
                        item=pos.item,
                        variation=pos.variation,
                        voucher=pos.voucher,
                        custom_price=None,
                        subevent=pos.subevent,
                        addon_to=pos.addon_to,
                        invoice_address=ia,
                    )
                    pos.price = price.gross
                    pos.tax_rate = price.rate
                    pos.tax_value = price.tax
                    pos.tax_rule = pos.item.tax_rule
                else:
                    pos._calculate_tax()
                pos.price_before_voucher = get_price(
                    item=pos.item,
                    variation=pos.variation,
                    voucher=None,
                    custom_price=None,
                    subevent=pos.subevent,
                    addon_to=pos.addon_to,
                    invoice_address=ia,
                ).gross
                if simulate:
                    pos = WrappedModel(pos)
                    pos.id = 0
                    answers = []
                    for answ_data in answers_data:
                        options = answ_data.pop('options', [])
                        answ = WrappedModel(QuestionAnswer(**answ_data))
                        answ.options = WrappedList(options)
                        answers.append(answ)
                    pos.answers = answers
                    pos.pseudonymization_id = "PREVIEW"
                else:
                    if pos.voucher:
                        Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
                    pos.save()
                    for answ_data in answers_data:
                        options = answ_data.pop('options', [])
                        if isinstance(answ_data['answer'], File):
                            # File answers: persist the file, then store its path.
                            an = answ_data.pop('answer')
                            answ = pos.answers.create(**answ_data, answer='')
                            answ.file.save(an.name, an, save=False)
                            answ.answer = 'file://' + answ.file.name
                            answ.save()
                        else:
                            answ = pos.answers.create(**answ_data)
                        answ.options.add(*options)
                pos_map[pos.positionid] = pos
            if not simulate:
                for cp in delete_cps:
                    cp.delete()
            order.total = sum([p.price for p in pos_map.values()])
            fees = []
            for fee_data in fees_data:
                is_percentage = fee_data.pop('_treat_value_as_percentage', False)
                if is_percentage:
                    fee_data['value'] = round_decimal(order.total * (fee_data['value'] / Decimal('100.00')),
                                                     self.context['event'].currency)
                is_split_taxes = fee_data.pop('_split_taxes_like_products', False)
                if is_split_taxes:
                    # Split the fee over the tax rules used by the products,
                    # proportionally to their net values.
                    d = defaultdict(lambda: Decimal('0.00'))
                    trz = TaxRule.zero()
                    for p in pos_map.values():
                        tr = p.tax_rule
                        d[tr] += p.price - p.tax_value
                    base_values = sorted([tuple(t) for t in d.items()], key=lambda t: (t[0] or trz).rate)
                    sum_base = sum(t[1] for t in base_values)
                    fee_values = [(t[0], round_decimal(fee_data['value'] * t[1] / sum_base, self.context['event'].currency))
                                  for t in base_values]
                    sum_fee = sum(t[1] for t in fee_values)
                    # If there are rounding differences, we fix them up, but always leaning to the benefit of the tax
                    # authorities
                    if sum_fee > fee_data['value']:
                        fee_values[0] = (fee_values[0][0], fee_values[0][1] + (fee_data['value'] - sum_fee))
                    elif sum_fee < fee_data['value']:
                        fee_values[-1] = (fee_values[-1][0], fee_values[-1][1] + (fee_data['value'] - sum_fee))
                    for tr, val in fee_values:
                        fee_data['tax_rule'] = tr
                        fee_data['value'] = val
                        f = OrderFee(**fee_data)
                        f.order = order._wrapped if simulate else order
                        f._calculate_tax()
                        fees.append(f)
                        if not simulate:
                            f.save()
                else:
                    f = OrderFee(**fee_data)
                    f.order = order._wrapped if simulate else order
                    f._calculate_tax()
                    fees.append(f)
                    if not simulate:
                        f.save()
            order.total += sum([f.value for f in fees])
            if simulate:
                order.fees = fees
                order.positions = pos_map.values()
                return order  # ignore payments
            else:
                order.save(update_fields=['total'])
            # Payment records: free orders are auto-confirmed, paid orders need
            # a confirmed payment, everything else gets a created payment stub.
            if order.total == Decimal('0.00') and validated_data.get('status') == Order.STATUS_PAID and not payment_provider:
                payment_provider = 'free'
            if order.total == Decimal('0.00') and validated_data.get('status') != Order.STATUS_PAID:
                order.status = Order.STATUS_PAID
                order.save()
                order.payments.create(
                    amount=order.total, provider='free', state=OrderPayment.PAYMENT_STATE_CONFIRMED,
                    payment_date=now()
                )
            elif payment_provider == "free" and order.total != Decimal('0.00'):
                raise ValidationError('You cannot use the "free" payment provider for non-free orders.')
            elif validated_data.get('status') == Order.STATUS_PAID:
                if not payment_provider:
                    raise ValidationError('You cannot create a paid order without a payment provider.')
                order.payments.create(
                    amount=order.total,
                    provider=payment_provider,
                    info=payment_info,
                    payment_date=payment_date,
                    state=OrderPayment.PAYMENT_STATE_CONFIRMED
                )
            elif payment_provider:
                order.payments.create(
                    amount=order.total,
                    provider=payment_provider,
                    info=payment_info,
                    state=OrderPayment.PAYMENT_STATE_CREATED
                )
        return order
class LinePositionField(serializers.IntegerField):
    """
    Converts between the 0-based storage of invoice line positions and the
    1-based numbering exposed through the API, which is more consistent
    with other models.
    """
    def to_representation(self, value):
        shown = super().to_representation(value)
        return shown + 1
    def to_internal_value(self, data):
        stored = super().to_internal_value(data)
        return stored - 1
class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
    # Nested read-only representation of a single invoice line.
    # ``position`` is stored 0-based but exposed 1-based (see LinePositionField).
    position = LinePositionField(read_only=True)
    class Meta:
        model = InvoiceLine
        fields = ('position', 'description', 'item', 'variation', 'attendee_name', 'event_date_from',
                  'event_date_to', 'gross_value', 'tax_value', 'tax_rate', 'tax_name')
class InvoiceSerializer(I18nAwareModelSerializer):
    # Read-only representation of an invoice including all of its lines.
    # ``refers`` links a cancellation to the invoice it cancels (by number).
    order = serializers.SlugRelatedField(slug_field='code', read_only=True)
    refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
    lines = InlineInvoiceLineSerializer(many=True)
    invoice_to_country = CountryField()
    invoice_from_country = CountryField()
    class Meta:
        model = Invoice
        fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
                  'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
                  'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
                  'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',
                  'custom_field', 'date', 'refers', 'locale',
                  'introductory_text', 'additional_text', 'payment_provider_text', 'footer_text', 'lines',
                  'foreign_currency_display', 'foreign_currency_rate', 'foreign_currency_rate_date',
                  'internal_reference')
class OrderPaymentCreateSerializer(I18nAwareModelSerializer):
    """Serializer for manually registering a payment on an existing order."""
    provider = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    info = CompatibleJSONField(required=False)
    class Meta:
        model = OrderPayment
        fields = ('state', 'amount', 'payment_date', 'provider', 'info')
    def create(self, validated_data):
        # Attach the new payment to the order supplied via serializer context.
        payment = OrderPayment(order=self.context['order'], **validated_data)
        payment.save()
        return payment
class OrderRefundCreateSerializer(I18nAwareModelSerializer):
    """Serializer for manually registering a refund on an existing order.

    ``payment`` optionally references one of the order's payments by its
    local ID.
    """
    payment = serializers.IntegerField(required=False, allow_null=True)
    provider = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    info = CompatibleJSONField(required=False)
    class Meta:
        model = OrderRefund
        fields = ('state', 'source', 'amount', 'payment', 'execution_date', 'provider', 'info', 'comment')
    def create(self, validated_data):
        payment_id = validated_data.pop('payment', None)
        payment = None
        if payment_id:
            # Resolve the referenced payment within the same order.
            try:
                payment = self.context['order'].payments.get(local_id=payment_id)
            except OrderPayment.DoesNotExist:
                raise ValidationError('Unknown payment ID.')
        refund = OrderRefund(order=self.context['order'], payment=payment, **validated_data)
        refund.save()
        return refund
class RevokedTicketSecretSerializer(I18nAwareModelSerializer):
    # Read-only listing of ticket secrets that have been revoked.
    class Meta:
        model = RevokedTicketSecret
        fields = ('id', 'secret', 'created')
| 43.63567 | 160 | 0.571908 |
import json
from collections import Counter, defaultdict
from decimal import Decimal
import pycountry
from django.core.files import File
from django.db.models import F, Q
from django.utils.timezone import now
from django.utils.translation import gettext_lazy
from django_countries.fields import Country
from rest_framework import serializers
from rest_framework.exceptions import ValidationError
from rest_framework.relations import SlugRelatedField
from rest_framework.reverse import reverse
from pretix.api.serializers.event import SubEventSerializer
from pretix.api.serializers.i18n import I18nAwareModelSerializer
from pretix.api.serializers.item import (
InlineItemVariationSerializer, ItemSerializer,
)
from pretix.base.channels import get_all_sales_channels
from pretix.base.decimal import round_decimal
from pretix.base.i18n import language
from pretix.base.models import (
CachedFile, Checkin, Invoice, InvoiceAddress, InvoiceLine, Item,
ItemVariation, Order, OrderPosition, Question, QuestionAnswer, Seat,
SubEvent, TaxRule, Voucher,
)
from pretix.base.models.orders import (
CartPosition, OrderFee, OrderPayment, OrderRefund, RevokedTicketSecret,
)
from pretix.base.pdf import get_images, get_variables
from pretix.base.services.cart import error_messages
from pretix.base.services.locking import NoLockManager
from pretix.base.services.pricing import get_price
from pretix.base.settings import COUNTRIES_WITH_STATE_IN_ADDRESS
from pretix.base.signals import register_ticket_outputs
from pretix.multidomain.urlreverse import build_absolute_uri
class CompatibleCountryField(serializers.Field):
    """Country field with a read fallback to the legacy ``country_old`` value."""
    def to_internal_value(self, data):
        # Map the incoming code onto the model attribute this field sources from.
        return {self.field_name: Country(data)}
    def to_representation(self, instance: InvoiceAddress):
        if instance.country:
            return str(instance.country)
        # Fall back to the pre-migration attribute when present.
        if hasattr(instance, 'country_old'):
            return instance.country_old
class CountryField(serializers.Field):
    """Serializes a django-countries value as a plain string (or None)."""
    def to_internal_value(self, data):
        return {self.field_name: Country(data)}
    def to_representation(self, src):
        if not src:
            return None
        return str(src)
class InvoiceAddressSerializer(I18nAwareModelSerializer):
    """Serializer for the invoice address attached to an order.

    All fields are optional; ``name`` is a legacy single-string alternative
    to the structured ``name_parts``.
    """
    country = CompatibleCountryField(source='*')
    name = serializers.CharField(required=False)
    class Meta:
        model = InvoiceAddress
        fields = ('last_modified', 'is_business', 'company', 'name', 'name_parts', 'street', 'zipcode', 'city', 'country',
                  'state', 'vat_id', 'vat_id_validated', 'internal_reference')
        read_only_fields = ('last_modified',)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Invoice addresses may be partially filled, so relax all constraints.
        for v in self.fields.values():
            v.required = False
            v.allow_blank = True
    def validate(self, data):
        # Legacy "name" and structured "name_parts" are mutually exclusive.
        if data.get('name') and data.get('name_parts'):
            raise ValidationError(
                {'name': ['Do not specify name if you specified name_parts.']}
            )
        if data.get('name_parts') and '_scheme' not in data.get('name_parts'):
            data['name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme
        if data.get('country'):
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )
        if data.get('state'):
            # States are only accepted for countries where pretix includes them in addresses.
            cc = str(data.get('country') or self.instance.country or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )
        return data
class AnswerQuestionIdentifierField(serializers.Field):
    # Read-only: exposes the identifier of the question this answer belongs to.
    def to_representation(self, instance: QuestionAnswer):
        return instance.question.identifier
class AnswerQuestionOptionsIdentifierField(serializers.Field):
    """Exposes the identifiers of all options selected in an answer."""
    def to_representation(self, instance: QuestionAnswer):
        # Unsaved answers (unless simulated via WrappedModel) cannot have options yet.
        if not isinstance(instance, WrappedModel) and not instance.pk:
            return []
        return [opt.identifier for opt in instance.options.all()]
class InlineSeatSerializer(I18nAwareModelSerializer):
    # Compact nested representation of a seat.
    class Meta:
        model = Seat
        fields = ('id', 'name', 'seat_guid')
class AnswerSerializer(I18nAwareModelSerializer):
    """Serializer for question answers attached to an order position.

    Handles file answers (exposing a download URL on read and accepting
    ``file:<id>`` upload references on write) and normalizes typed answers
    (boolean, number, date, time, datetime, choices) during validation.
    """
    question_identifier = AnswerQuestionIdentifierField(source='*', read_only=True)
    option_identifiers = AnswerQuestionOptionsIdentifierField(source='*', read_only=True)
    def to_representation(self, instance):
        r = super().to_representation(instance)
        # File answers are stored as "file://..." paths; expose them as API URLs.
        if r['answer'].startswith('file://') and instance.orderposition:
            r['answer'] = reverse('api-v1:orderposition-answer', kwargs={
                'organizer': instance.orderposition.order.event.organizer.slug,
                'event': instance.orderposition.order.event.slug,
                'pk': instance.orderposition.pk,
                'question': instance.question_id,
            }, request=self.context['request'])
        return r
    class Meta:
        model = QuestionAnswer
        fields = ('question', 'answer', 'question_identifier', 'options', 'option_identifiers')
    def validate_question(self, q):
        if q.event != self.context['event']:
            raise ValidationError(
                'The specified question does not belong to this event.'
            )
        return q
    def _handle_file_upload(self, data):
        """Resolve a ``file:<id>`` answer to a previously uploaded CachedFile."""
        # Magic value meaning "leave the stored file unchanged".
        if data['answer'] == 'file:keep':
            return data
        try:
            # Uploaded files are bound to the uploading user/token through the
            # session key they were stored under.
            ao = self.context["request"].user or self.context["request"].auth
            cf = CachedFile.objects.get(
                session_key=f'api-upload-{str(type(ao))}-{ao.pk}',
                file__isnull=False,
                pk=data['answer'][len("file:"):],
            )
        except (ValidationError, IndexError):
            # e.g. a malformed ID that cannot be used as a primary key
            raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
        except CachedFile.DoesNotExist:
            raise ValidationError('The submitted file ID "{fid}" was not found.'.format(fid=data))
        allowed_types = (
            'image/png', 'image/jpeg', 'image/gif', 'application/pdf'
        )
        if cf.type not in allowed_types:
            raise ValidationError('The submitted file "{fid}" has a file type that is not allowed in this field.'.format(fid=data))
        # 10 MiB size limit for answer files.
        if cf.file.size > 10 * 1024 * 1024:
            raise ValidationError('The submitted file "{fid}" is too large to be used in this field.'.format(fid=data))
        data['options'] = []
        data['answer'] = cf.file
        return data
    def validate(self, data):
        """Validate and normalize the answer according to the question type."""
        if data.get('question').type == Question.TYPE_FILE:
            return self._handle_file_upload(data)
        elif data.get('question').type in (Question.TYPE_CHOICE, Question.TYPE_CHOICE_MULTIPLE):
            if not data.get('options'):
                raise ValidationError(
                    'You need to specify options if the question is of a choice type.'
                )
            if data.get('question').type == Question.TYPE_CHOICE and len(data.get('options')) > 1:
                raise ValidationError(
                    'You can specify at most one option for this question.'
                )
            for o in data.get('options'):
                if o.question_id != data.get('question').pk:
                    raise ValidationError(
                        'The specified option does not belong to this question.'
                    )
            # The plain-text answer mirrors the selected options.
            data['answer'] = ", ".join([str(o) for o in data.get('options')])
        else:
            if data.get('options'):
                raise ValidationError(
                    'You should not specify options if the question is not of a choice type.'
                )
            if data.get('question').type == Question.TYPE_BOOLEAN:
                if data.get('answer') in ['true', 'True', '1', 'TRUE']:
                    data['answer'] = 'True'
                elif data.get('answer') in ['false', 'False', '0', 'FALSE']:
                    data['answer'] = 'False'
                else:
                    raise ValidationError(
                        'Please specify "true" or "false" for boolean questions.'
                    )
            elif data.get('question').type == Question.TYPE_NUMBER:
                # Only validates that the value parses as a decimal; the raw
                # string is kept as the stored answer.
                serializers.DecimalField(
                    max_digits=50,
                    decimal_places=25
                ).to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_DATE:
                data['answer'] = serializers.DateField().to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_TIME:
                data['answer'] = serializers.TimeField().to_internal_value(data.get('answer'))
            elif data.get('question').type == Question.TYPE_DATETIME:
                data['answer'] = serializers.DateTimeField().to_internal_value(data.get('answer'))
        return data
class CheckinSerializer(I18nAwareModelSerializer):
    # Read-only representation of a single check-in of a position.
    class Meta:
        model = Checkin
        fields = ('id', 'datetime', 'list', 'auto_checked_in', 'type')
class OrderDownloadsField(serializers.Field):
    """Lists the ticket download URLs available for an order."""
    def to_representation(self, instance: Order):
        # Downloads exist for paid orders, and for pending ones only if the
        # event allows pre-payment downloads and no approval is outstanding.
        if instance.status != Order.STATUS_PAID:
            if instance.status != Order.STATUS_PENDING or instance.require_approval or not instance.event.settings.ticket_download_pending:
                return []
        request = self.context['request']
        downloads = []
        for receiver, response in register_ticket_outputs.send(instance.event):
            provider = response(instance.event)
            if not provider.is_enabled:
                continue
            downloads.append({
                'output': provider.identifier,
                'url': reverse('api-v1:order-download', kwargs={
                    'organizer': instance.event.organizer.slug,
                    'event': instance.event.slug,
                    'code': instance.code,
                    'output': provider.identifier,
                }, request=request)
            })
        return downloads
class PositionDownloadsField(serializers.Field):
    """Lists the ticket download URLs available for a single position."""
    def to_representation(self, instance: OrderPosition):
        order = instance.order
        # Same visibility rules as for whole-order downloads, plus the
        # position itself must be allowed to generate a ticket.
        if order.status != Order.STATUS_PAID:
            if order.status != Order.STATUS_PENDING or order.require_approval or not order.event.settings.ticket_download_pending:
                return []
        if not instance.generate_ticket:
            return []
        request = self.context['request']
        downloads = []
        for receiver, response in register_ticket_outputs.send(order.event):
            provider = response(order.event)
            if not provider.is_enabled:
                continue
            downloads.append({
                'output': provider.identifier,
                'url': reverse('api-v1:orderposition-download', kwargs={
                    'organizer': order.event.organizer.slug,
                    'event': order.event.slug,
                    'pk': instance.pk,
                    'output': provider.identifier,
                }, request=request)
            })
        return downloads
class PdfDataSerializer(serializers.Field):
    """Read-only field exposing all data needed to render PDF tickets for a
    position: placeholder text values, event/item meta data and image URLs.

    The results of ``get_variables``/``get_images`` are cached on the
    serializer context so they are computed only once per request; meta data
    is cached on the respective model instances.
    """
    def to_representation(self, instance: OrderPosition):
        res = {}
        ev = instance.subevent or instance.order.event
        # Render in the order's locale so localized placeholder values match
        # what ends up on the actual ticket.
        with language(instance.order.locale, instance.order.event.settings.region):
            if 'vars' not in self.context:
                self.context['vars'] = get_variables(self.context['request'].event)
            if 'vars_images' not in self.context:
                self.context['vars_images'] = get_images(self.context['request'].event)
            for k, f in self.context['vars'].items():
                res[k] = f['evaluate'](instance, instance.order, ev)
            if not hasattr(ev, '_cached_meta_data'):
                ev._cached_meta_data = ev.meta_data
            for k, v in ev._cached_meta_data.items():
                res['meta:' + k] = v
            if not hasattr(instance.item, '_cached_meta_data'):
                instance.item._cached_meta_data = instance.item.meta_data
            for k, v in instance.item._cached_meta_data.items():
                res['itemmeta:' + k] = v
            res['images'] = {}
            for k, f in self.context['vars_images'].items():
                if 'etag' in f:
                    # The etag callable doubles as the existence check.
                    has_image = etag = f['etag'](instance, instance.order, ev)
                else:
                    # Bug fix: this branch previously called f['etag'], which by
                    # the branch condition cannot exist here and always raised
                    # KeyError. Use the image's 'evaluate' callable (the same
                    # key used for text variables above) as the existence check.
                    has_image = f['evaluate'](instance, instance.order, ev)
                    etag = None
                if has_image:
                    url = reverse('api-v1:orderposition-pdf_image', kwargs={
                        'organizer': instance.order.event.organizer.slug,
                        'event': instance.order.event.slug,
                        'pk': instance.pk,
                        'key': k,
                    }, request=self.context['request'])
                    if etag:
                        # Embed the etag so clients can cache-bust image URLs.
                        url += f'#etag={etag}'
                    res['images'][k] = url
                else:
                    res['images'][k] = None
        return res
class OrderPositionSerializer(I18nAwareModelSerializer):
    """
    Read/update serializer for a single order position (ticket).

    Most fields are read-only; ``update()`` only modifies attendee data,
    address fields and question answers.
    """
    checkins = CheckinSerializer(many=True, read_only=True)
    answers = AnswerSerializer(many=True)
    downloads = PositionDownloadsField(source='*', read_only=True)
    order = serializers.SlugRelatedField(slug_field='code', read_only=True)
    pdf_data = PdfDataSerializer(source='*', read_only=True)
    seat = InlineSeatSerializer(read_only=True)
    country = CompatibleCountryField(source='*')
    attendee_name = serializers.CharField(required=False)
    class Meta:
        model = OrderPosition
        fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'attendee_email', 'voucher', 'tax_rate', 'tax_value', 'secret', 'addon_to', 'subevent', 'checkins',
                  'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data', 'seat', 'canceled')
        read_only_fields = (
            'id', 'order', 'positionid', 'item', 'variation', 'price', 'voucher', 'tax_rate', 'tax_value', 'secret',
            'addon_to', 'subevent', 'checkins', 'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data',
            'seat', 'canceled'
        )
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        request = self.context.get('request')
        # pdf_data is expensive to compute and permission-gated: only expose it when
        # explicitly requested (?pdf_data=true) AND the caller may view order data.
        if request and (not request.query_params.get('pdf_data', 'false') == 'true' or 'can_view_orders' not in request.eventpermset):
            self.fields.pop('pdf_data', None)
    def validate(self, data):
        """Cross-field validation of attendee-name variants and the address."""
        # attendee_name (legacy, plain string) and attendee_name_parts (structured)
        # are mutually exclusive in the input.
        if data.get('attendee_name') and data.get('attendee_name_parts'):
            raise ValidationError(
                {'attendee_name': ['Do not specify attendee_name if you specified attendee_name_parts.']}
            )
        # Stamp the event's configured name scheme onto structured names that lack one.
        if data.get('attendee_name_parts') and '_scheme' not in data.get('attendee_name_parts'):
            data['attendee_name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme
        if data.get('country'):
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )
        if data.get('state'):
            # Fall back to the currently stored country if none was sent in this request.
            cc = str(data.get('country') or self.instance.country or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )
        return data
    def update(self, instance, validated_data):
        # (hopefully), we'll be extra careful here and be explicit about the model fields we update.
        update_fields = [
            'attendee_name_parts', 'company', 'street', 'zipcode', 'city', 'country',
            'state', 'attendee_email',
        ]
        answers_data = validated_data.pop('answers', None)
        # Legacy clients send a plain attendee_name; convert it to name_parts.
        name = validated_data.pop('attendee_name', '')
        if name and not validated_data.get('attendee_name_parts'):
            validated_data['attendee_name_parts'] = {
                '_legacy': name
            }
        for attr, value in validated_data.items():
            if attr in update_fields:
                setattr(instance, attr, value)
        instance.save(update_fields=update_fields)
        # Synchronize answers: update existing ones in place, create new ones, and
        # delete answers to questions that are no longer present in the payload.
        if answers_data is not None:
            qs_seen = set()
            answercache = {
                a.question_id: a for a in instance.answers.all()
            }
            for answ_data in answers_data:
                options = answ_data.pop('options', [])
                if answ_data['question'].pk in qs_seen:
                    raise ValidationError(f'Question {answ_data["question"]} was sent twice.')
                if answ_data['question'].pk in answercache:
                    # Update path: an answer for this question already exists.
                    a = answercache[answ_data['question'].pk]
                    if isinstance(answ_data['answer'], File):
                        # Store the upload and reference it via the file:// convention.
                        a.file.save(answ_data['answer'].name, answ_data['answer'], save=False)
                        a.answer = 'file://' + a.file.name
                    elif a.answer.startswith('file://') and answ_data['answer'] == "file:keep":
                        # Sentinel value: keep the previously uploaded file untouched.
                        pass
                    else:
                        for attr, value in answ_data.items():
                            setattr(a, attr, value)
                    a.save()
                else:
                    # Create path: first answer for this question.
                    if isinstance(answ_data['answer'], File):
                        an = answ_data.pop('answer')
                        a = instance.answers.create(**answ_data, answer='')
                        a.file.save(an.name, an, save=False)
                        a.answer = 'file://' + a.file.name
                        a.save()
                    else:
                        a = instance.answers.create(**answ_data)
                a.options.set(options)
                qs_seen.add(a.question_id)
            # Remove answers for questions absent from the submitted payload.
            for qid, a in answercache.items():
                if qid not in qs_seen:
                    a.delete()
        return instance
class RequireAttentionField(serializers.Field):
    """Read-only flag telling check-in clients to require special attention
    for this position (set on either the order or the product)."""
    def to_representation(self, instance: OrderPosition):
        # Order-level flag wins without touching the item relation.
        attention = instance.order.checkin_attention
        if not attention:
            attention = instance.item.checkin_attention
        return attention
class AttendeeNameField(serializers.Field):
    """Attendee name with fallbacks: the position's own name, then the base
    position's name (for add-ons), then the order's invoice address name."""
    def to_representation(self, instance: OrderPosition):
        name = instance.attendee_name
        if not name and instance.addon_to_id:
            name = instance.addon_to.attendee_name
        if not name:
            try:
                name = instance.order.invoice_address.name
            except InvoiceAddress.DoesNotExist:
                pass
        return name
class AttendeeNamePartsField(serializers.Field):
    """Structured attendee name parts with the same fallback chain as
    AttendeeNameField: own value, base position (for add-ons), invoice address."""
    def to_representation(self, instance: OrderPosition):
        name = instance.attendee_name
        parts = instance.attendee_name_parts
        if not name and instance.addon_to_id:
            name = instance.addon_to.attendee_name
            parts = instance.addon_to.attendee_name_parts
        if not name:
            try:
                parts = instance.order.invoice_address.name_parts
            except InvoiceAddress.DoesNotExist:
                pass
        return parts
class CheckinListOrderPositionSerializer(OrderPositionSerializer):
    """
    Position serializer used by the check-in list endpoints.

    Adds fallback attendee names, the parent order's status, and a flag telling
    the scanning client to require special attention for this position.
    """
    require_attention = RequireAttentionField(source='*')
    attendee_name = AttendeeNameField(source='*')
    attendee_name_parts = AttendeeNamePartsField(source='*')
    order__status = serializers.SlugRelatedField(read_only=True, slug_field='status', source='order')
    class Meta:
        model = OrderPosition
        fields = ('id', 'order', 'positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'attendee_email', 'voucher', 'tax_rate', 'tax_value', 'secret', 'addon_to', 'subevent', 'checkins',
                  'downloads', 'answers', 'tax_rule', 'pseudonymization_id', 'pdf_data', 'seat', 'require_attention',
                  'order__status')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # ?expand=<field> replaces primary-key references with fully nested objects.
        if 'subevent' in self.context['request'].query_params.getlist('expand'):
            self.fields['subevent'] = SubEventSerializer(read_only=True)
        if 'item' in self.context['request'].query_params.getlist('expand'):
            self.fields['item'] = ItemSerializer(read_only=True)
        if 'variation' in self.context['request'].query_params.getlist('expand'):
            self.fields['variation'] = InlineItemVariationSerializer(read_only=True)
class OrderPaymentTypeField(serializers.Field):
    """Legacy field: the provider of the last payment in the order's
    (prefetched) payment list, or None if there are no payments."""
    def to_representation(self, instance: Order):
        provider = None
        # Iterate the full prefetched list so the last entry wins.
        for payment in instance.payments.all():
            provider = payment.provider
        return provider
class OrderPaymentDateField(serializers.DateField):
    """Legacy field: the date of the last payment that has a payment_date,
    or None if no payment was completed."""
    def to_representation(self, instance: Order):
        latest = None
        for payment in instance.payments.all():
            if payment.payment_date:
                latest = payment.payment_date
        if not latest:
            return None
        return super().to_representation(latest.date())
class OrderFeeSerializer(I18nAwareModelSerializer):
    """Read serializer for fees (payment fees, shipping, …) attached to an order."""
    class Meta:
        model = OrderFee
        fields = ('fee_type', 'value', 'description', 'internal_type', 'tax_rate', 'tax_value', 'tax_rule', 'canceled')
class PaymentURLField(serializers.URLField):
    """Presale URL to complete this payment; only exposed while the payment
    is still in the 'created' state, otherwise None."""
    def to_representation(self, instance: OrderPayment):
        if instance.state == OrderPayment.PAYMENT_STATE_CREATED:
            return build_absolute_uri(self.context['event'], 'presale:event.order.pay', kwargs={
                'order': instance.order.code,
                'secret': instance.order.secret,
                'payment': instance.pk,
            })
        return None
class PaymentDetailsField(serializers.Field):
    """Provider-specific payment details; an empty dict if the provider
    plugin is no longer available."""
    def to_representation(self, value: OrderPayment):
        provider = value.payment_provider
        return provider.api_payment_details(value) if provider else {}
class OrderPaymentSerializer(I18nAwareModelSerializer):
    """Read serializer for payments of an order, including a pay URL while pending."""
    payment_url = PaymentURLField(source='*', allow_null=True, read_only=True)
    details = PaymentDetailsField(source='*', allow_null=True, read_only=True)
    class Meta:
        model = OrderPayment
        fields = ('local_id', 'state', 'amount', 'created', 'payment_date', 'provider', 'payment_url',
                  'details')
class OrderRefundSerializer(I18nAwareModelSerializer):
    """Read serializer for refunds of an order; the related payment is referenced by its local_id."""
    payment = SlugRelatedField(slug_field='local_id', read_only=True)
    class Meta:
        model = OrderRefund
        fields = ('local_id', 'state', 'source', 'amount', 'payment', 'created', 'execution_date', 'comment', 'provider')
class OrderURLField(serializers.URLField):
    """Absolute presale URL of the order's status page."""
    def to_representation(self, instance: Order):
        url_kwargs = {
            'order': instance.code,
            'secret': instance.secret,
        }
        return build_absolute_uri(self.context['event'], 'presale:event.order', kwargs=url_kwargs)
class OrderSerializer(I18nAwareModelSerializer):
    """
    Full read/update serializer for orders.

    Nested positions, fees, payments and refunds are read-only here; only a
    small set of order-level fields (comment, checkin_attention, email, phone,
    locale) and the invoice address may be modified through update().
    """
    invoice_address = InvoiceAddressSerializer(allow_null=True)
    positions = OrderPositionSerializer(many=True, read_only=True)
    fees = OrderFeeSerializer(many=True, read_only=True)
    downloads = OrderDownloadsField(source='*', read_only=True)
    payments = OrderPaymentSerializer(many=True, read_only=True)
    refunds = OrderRefundSerializer(many=True, read_only=True)
    payment_date = OrderPaymentDateField(source='*', read_only=True)
    payment_provider = OrderPaymentTypeField(source='*', read_only=True)
    url = OrderURLField(source='*', read_only=True)
    class Meta:
        model = Order
        fields = (
            'code', 'status', 'testmode', 'secret', 'email', 'phone', 'locale', 'datetime', 'expires', 'payment_date',
            'payment_provider', 'fees', 'total', 'comment', 'invoice_address', 'positions', 'downloads',
            'checkin_attention', 'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel',
            'url'
        )
        read_only_fields = (
            'code', 'status', 'testmode', 'secret', 'datetime', 'expires', 'payment_date',
            'payment_provider', 'fees', 'total', 'positions', 'downloads',
            'last_modified', 'payments', 'refunds', 'require_approval', 'sales_channel'
        )
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        request = self.context['request']
        # pdf_data is expensive to render, so it is opt-in via ?pdf_data=true.
        if not request.query_params.get('pdf_data', 'false') == 'true':
            self.fields['positions'].child.fields.pop('pdf_data', None)
        # ?exclude=<field> or ?exclude=<field>.<subfield> removes fields from the
        # output. The values are client-controlled, so silently ignore anything
        # that does not resolve to an existing (sub)field instead of letting a
        # KeyError/AttributeError bubble up as a server error.
        for exclude_field in request.query_params.getlist('exclude'):
            p = exclude_field.split('.')
            if p[0] in self.fields:
                if len(p) == 1:
                    del self.fields[p[0]]
                elif len(p) == 2:
                    # many=True serializers keep their fields on ``.child``; plain
                    # nested serializers keep them on the field itself.
                    nested = getattr(self.fields[p[0]], 'child', self.fields[p[0]])
                    subfields = getattr(nested, 'fields', None)
                    if subfields is not None:
                        subfields.pop(p[1], None)
    def validate_locale(self, l):
        """Only accept locales that are enabled for this event."""
        if l not in set(k for k in self.instance.event.settings.locales):
            raise ValidationError('"{}" is not a supported locale for this event.'.format(l))
        return l
    def update(self, instance, validated_data):
        # (hopefully), we'll be extra careful here and be explicit about the model fields we update.
        update_fields = ['comment', 'checkin_attention', 'email', 'locale', 'phone']
        if 'invoice_address' in validated_data:
            iadata = validated_data.pop('invoice_address')
            if not iadata:
                # Passing null/empty deletes an existing invoice address.
                try:
                    instance.invoice_address.delete()
                except InvoiceAddress.DoesNotExist:
                    pass
            else:
                # Legacy clients send a plain ``name``; convert it to name_parts.
                name = iadata.pop('name', '')
                if name and not iadata.get('name_parts'):
                    iadata['name_parts'] = {
                        '_legacy': name
                    }
                try:
                    ia = instance.invoice_address
                    # A changed VAT ID invalidates any previous validation result,
                    # unless the client explicitly sets vat_id_validated itself.
                    if iadata.get('vat_id') != ia.vat_id and 'vat_id_validated' not in iadata:
                        ia.vat_id_validated = False
                    self.fields['invoice_address'].update(ia, iadata)
                except InvoiceAddress.DoesNotExist:
                    InvoiceAddress.objects.create(order=instance, **iadata)
        for attr, value in validated_data.items():
            if attr in update_fields:
                setattr(instance, attr, value)
        instance.save(update_fields=update_fields)
        return instance
class AnswerQuestionOptionsField(serializers.Field):
    """Selected question options, serialized as a list of primary keys."""
    def to_representation(self, instance: QuestionAnswer):
        return [option.pk for option in instance.options.all()]
class SimulatedAnswerSerializer(AnswerSerializer):
    """Answer serializer for simulated (preview) orders; options come from the wrapped list."""
    options = AnswerQuestionOptionsField(read_only=True, source='*')
class SimulatedOrderPositionSerializer(OrderPositionSerializer):
    """Position serializer for simulated (preview) orders that were never saved to the database."""
    answers = SimulatedAnswerSerializer(many=True)
    # Unsaved positions have no PK, so add-ons are referenced by positionid instead.
    addon_to = serializers.SlugRelatedField(read_only=True, slug_field='positionid')
class SimulatedOrderSerializer(OrderSerializer):
    """Order serializer for simulated (preview) orders created with ``simulate=True``."""
    positions = SimulatedOrderPositionSerializer(many=True, read_only=True)
class PriceCalcSerializer(serializers.Serializer):
    """Input serializer for price-preview calculations, scoped to one event's objects."""
    item = serializers.PrimaryKeyRelatedField(queryset=Item.objects.none(), required=False, allow_null=True)
    variation = serializers.PrimaryKeyRelatedField(queryset=ItemVariation.objects.none(), required=False, allow_null=True)
    subevent = serializers.PrimaryKeyRelatedField(queryset=SubEvent.objects.none(), required=False, allow_null=True)
    tax_rule = serializers.PrimaryKeyRelatedField(queryset=TaxRule.objects.none(), required=False, allow_null=True)
    locale = serializers.CharField(allow_null=True, required=False)
    def __init__(self, *args, **kwargs):
        event = kwargs.pop('event')
        super().__init__(*args, **kwargs)
        # Restrict every relation field to objects belonging to this event.
        self.fields['item'].queryset = event.items.all()
        self.fields['variation'].queryset = ItemVariation.objects.filter(item__event=event)
        self.fields['tax_rule'].queryset = event.tax_rules.all()
        if not event.has_subevents:
            # Events without dates must not accept a subevent at all.
            del self.fields['subevent']
        else:
            self.fields['subevent'].queryset = event.subevents.all()
class AnswerCreateSerializer(AnswerSerializer):
    """Write variant of AnswerSerializer used during order creation; currently identical."""
    pass
class OrderFeeCreateSerializer(I18nAwareModelSerializer):
    """Write serializer for fees on order creation.

    The underscore-prefixed flags are not model fields; they are consumed by
    OrderCreateSerializer.create() to post-process the fee value."""
    _treat_value_as_percentage = serializers.BooleanField(default=False, required=False)
    _split_taxes_like_products = serializers.BooleanField(default=False, required=False)
    class Meta:
        model = OrderFee
        fields = ('fee_type', 'value', 'description', 'internal_type', 'tax_rule',
                  '_treat_value_as_percentage', '_split_taxes_like_products')
    def validate_tax_rule(self, tr):
        """Reject tax rules that belong to a different event."""
        if tr is None or tr.event == self.context['event']:
            return tr
        raise ValidationError(
            'The specified tax rate does not belong to this event.'
        )
class OrderPositionCreateSerializer(I18nAwareModelSerializer):
    """
    Write serializer for positions inside OrderCreateSerializer.

    Validation that needs knowledge of sibling positions (position IDs,
    add-ons, quotas, seats, voucher budgets) lives in OrderCreateSerializer;
    this serializer validates only self-contained properties of one position.
    """
    answers = AnswerCreateSerializer(many=True, required=False)
    addon_to = serializers.IntegerField(required=False, allow_null=True)
    secret = serializers.CharField(required=False)
    attendee_name = serializers.CharField(required=False, allow_null=True)
    seat = serializers.CharField(required=False, allow_null=True)
    price = serializers.DecimalField(required=False, allow_null=True, decimal_places=2,
                                     max_digits=10)
    voucher = serializers.SlugRelatedField(slug_field='code', queryset=Voucher.objects.none(),
                                           required=False, allow_null=True)
    country = CompatibleCountryField(source='*')
    class Meta:
        model = OrderPosition
        fields = ('positionid', 'item', 'variation', 'price', 'attendee_name', 'attendee_name_parts', 'attendee_email',
                  'company', 'street', 'zipcode', 'city', 'country', 'state',
                  'secret', 'addon_to', 'subevent', 'answers', 'seat', 'voucher')
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Address fields are always optional when creating positions.
        for k, v in self.fields.items():
            if k in ('company', 'street', 'zipcode', 'city', 'country', 'state'):
                v.required = False
                v.allow_blank = True
                v.allow_null = True
    def validate_secret(self, secret):
        """Reject ticket secrets that already exist within this event."""
        if secret and OrderPosition.all.filter(order__event=self.context['event'], secret=secret).exists():
            raise ValidationError(
                'You cannot assign a position secret that already exists.'
            )
        return secret
    def validate_item(self, item):
        """The product must belong to this event and be active."""
        if item.event != self.context['event']:
            raise ValidationError(
                'The specified item does not belong to this event.'
            )
        if not item.active:
            raise ValidationError(
                'The specified item is not active.'
            )
        return item
    def validate_subevent(self, subevent):
        """A subevent is required for event series and forbidden otherwise."""
        if self.context['event'].has_subevents:
            if not subevent:
                raise ValidationError(
                    'You need to set a subevent.'
                )
            if subevent.event != self.context['event']:
                raise ValidationError(
                    'The specified subevent does not belong to this event.'
                )
        elif subevent:
            raise ValidationError(
                'You cannot set a subevent for this event.'
            )
        return subevent
    def validate(self, data):
        """Cross-field validation: variations, attendee-name variants, addresses."""
        if data.get('item'):
            if data.get('item').has_variations:
                if not data.get('variation'):
                    raise ValidationError({'variation': ['You should specify a variation for this item.']})
                else:
                    if data.get('variation').item != data.get('item'):
                        raise ValidationError(
                            {'variation': ['The specified variation does not belong to the specified item.']}
                        )
            elif data.get('variation'):
                raise ValidationError(
                    {'variation': ['You cannot specify a variation for this item.']}
                )
        # attendee_name (legacy string) and attendee_name_parts are mutually exclusive.
        if data.get('attendee_name') and data.get('attendee_name_parts'):
            raise ValidationError(
                {'attendee_name': ['Do not specify attendee_name if you specified attendee_name_parts.']}
            )
        if data.get('attendee_name_parts') and '_scheme' not in data.get('attendee_name_parts'):
            data['attendee_name_parts']['_scheme'] = self.context['request'].event.settings.name_scheme
        if data.get('country'):
            if not pycountry.countries.get(alpha_2=data.get('country').code):
                raise ValidationError(
                    {'country': ['Invalid country code.']}
                )
        if data.get('state'):
            # During creation, self.instance is None, so there is no stored country
            # to fall back to. Guard against that so a ``state`` without a
            # ``country`` yields a validation error instead of an AttributeError.
            cc = str(data.get('country') or (self.instance.country if self.instance else '') or '')
            if cc not in COUNTRIES_WITH_STATE_IN_ADDRESS:
                raise ValidationError(
                    {'state': ['States are not supported in country "{}".'.format(cc)]}
                )
            if not pycountry.subdivisions.get(code=cc + '-' + data.get('state')):
                raise ValidationError(
                    {'state': ['"{}" is not a known subdivision of the country "{}".'.format(data.get('state'), cc)]}
                )
        return data
class CompatibleJSONField(serializers.JSONField):
    """JSON field persisted as a serialized string (legacy TextField storage)."""
    def to_internal_value(self, data):
        # Persist as a JSON string; reject values that cannot be serialized.
        try:
            return json.dumps(data)
        except (TypeError, ValueError):
            self.fail('invalid')
    def to_representation(self, value):
        # Empty strings / None pass through unchanged.
        return json.loads(value) if value else value
class WrappedList:
    """Mimics the ``.all()`` interface of a Django manager over a plain list,
    so serializers can treat simulated data like queryset-backed relations."""
    def __init__(self, data):
        self._data = data
    def all(self):
        """Return the wrapped data unchanged."""
        return self._data
class WrappedModel:
    """Read-only proxy around a model instance, used for simulated (preview)
    orders that must never be persisted."""
    def __init__(self, model):
        self._wrapped = model
    def save(self, *args, **kwargs):
        # Previews must never hit the database.
        raise NotImplementedError
    def delete(self, *args, **kwargs):
        # Previews must never hit the database.
        raise NotImplementedError
    def __getattr__(self, item):
        # Delegate any unknown attribute access to the wrapped instance.
        return getattr(self._wrapped, item)
class OrderCreateSerializer(I18nAwareModelSerializer):
    """
    Write serializer that creates a complete order including positions, fees,
    voucher redemption, seat assignment, quota checking and an initial payment.

    With ``simulate=True``, nothing is written to the database; the computed
    order is returned wrapped in WrappedModel/WrappedList proxies instead.
    """
    invoice_address = InvoiceAddressSerializer(required=False)
    positions = OrderPositionCreateSerializer(many=True, required=True)
    fees = OrderFeeCreateSerializer(many=True, required=False)
    status = serializers.ChoiceField(choices=(
        ('n', Order.STATUS_PENDING),
        ('p', Order.STATUS_PAID),
    ), default='n', required=False)
    code = serializers.CharField(
        required=False,
        max_length=16,
        min_length=5
    )
    comment = serializers.CharField(required=False, allow_blank=True)
    payment_provider = serializers.CharField(required=False, allow_null=True)
    payment_info = CompatibleJSONField(required=False)
    # Cart IDs whose positions should be released in favor of this order.
    consume_carts = serializers.ListField(child=serializers.CharField(), required=False)
    # force=True skips quota checking entirely.
    force = serializers.BooleanField(default=False, required=False)
    payment_date = serializers.DateTimeField(required=False, allow_null=True)
    send_email = serializers.BooleanField(default=False, required=False)
    simulate = serializers.BooleanField(default=False, required=False)
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Vouchers may only reference codes of this event.
        self.fields['positions'].child.fields['voucher'].queryset = self.context['event'].vouchers.all()
    class Meta:
        model = Order
        fields = ('code', 'status', 'testmode', 'email', 'phone', 'locale', 'payment_provider', 'fees', 'comment', 'sales_channel',
                  'invoice_address', 'positions', 'checkin_attention', 'payment_info', 'payment_date', 'consume_carts',
                  'force', 'send_email', 'simulate')
    def validate_payment_provider(self, pp):
        """None means "no payment yet"; otherwise the provider must be installed."""
        if pp is None:
            return None
        if pp not in self.context['event'].get_payment_providers():
            raise ValidationError('The given payment provider is not known.')
        return pp
    def validate_sales_channel(self, channel):
        """Only accept registered sales channels."""
        if channel not in get_all_sales_channels():
            raise ValidationError('Unknown sales channel.')
        return channel
    def validate_code(self, code):
        """Custom order codes must be unique per organizer and use the safe alphabet."""
        if code and Order.objects.filter(event__organizer=self.context['event'].organizer, code=code).exists():
            raise ValidationError(
                'This order code is already in use.'
            )
        # Restricted alphabet avoids ambiguous characters (no I, O, 0-lookalikes issues).
        if any(c not in 'ABCDEFGHJKLMNPQRSTUVWXYZ1234567890' for c in code):
            raise ValidationError(
                'This order code contains invalid characters.'
            )
        return code
    def validate_positions(self, data):
        """Validate position IDs and addon_to references across the whole list.

        Either all positions carry explicit, consecutive positionids (required
        when add-ons are used), or none do and IDs are assigned automatically.
        """
        if not data:
            raise ValidationError(
                'An order cannot be empty.'
            )
        errs = [{} for p in data]
        if any([p.get('positionid') for p in data]):
            # Manual mode: every position needs an explicit ID.
            if not all([p.get('positionid') for p in data]):
                for i, p in enumerate(data):
                    if not p.get('positionid'):
                        errs[i]['positionid'] = [
                            'If you set position IDs manually, you need to do so for all positions.'
                        ]
                raise ValidationError(errs)
            last_non_add_on = None
            last_posid = 0
            for i, p in enumerate(data):
                if p['positionid'] != last_posid + 1:
                    errs[i]['positionid'] = [
                        'Position IDs need to be consecutive.'
                    ]
                # Add-ons must directly follow the base position they reference.
                if p.get('addon_to') and p['addon_to'] != last_non_add_on:
                    errs[i]['addon_to'] = [
                        "If you set addon_to, you need to make sure that the referenced "
                        "position ID exists and is transmitted directly before its add-ons."
                    ]
                if not p.get('addon_to'):
                    last_non_add_on = p['positionid']
                last_posid = p['positionid']
        elif any([p.get('addon_to') for p in data]):
            errs = [
                {'positionid': ["If you set addon_to on any position, you need to specify position IDs manually."]}
                for p in data
            ]
        else:
            # Automatic mode: number positions 1..n in submission order.
            for i, p in enumerate(data):
                p['positionid'] = i + 1
        if any(errs):
            raise ValidationError(errs)
        return data
    def validate_testmode(self, testmode):
        """Test-mode orders are only allowed on sales channels that support them."""
        if 'sales_channel' in self.initial_data:
            try:
                sales_channel = get_all_sales_channels()[self.initial_data['sales_channel']]
                if testmode and not sales_channel.testmode_supported:
                    raise ValidationError('This sales channel does not provide support for test mode.')
            except KeyError:
                # Unknown channel: reported by validate_sales_channel instead.
                pass
        return testmode
    def create(self, validated_data):
        """Create (or simulate) the order inside the event's booking lock."""
        # Strip all non-model inputs before constructing the Order instance.
        fees_data = validated_data.pop('fees') if 'fees' in validated_data else []
        positions_data = validated_data.pop('positions') if 'positions' in validated_data else []
        payment_provider = validated_data.pop('payment_provider', None)
        payment_info = validated_data.pop('payment_info', '{}')
        payment_date = validated_data.pop('payment_date', now())
        force = validated_data.pop('force', False)
        simulate = validated_data.pop('simulate', False)
        self._send_mail = validated_data.pop('send_email', False)
        if 'invoice_address' in validated_data:
            iadata = validated_data.pop('invoice_address')
            # Legacy clients send a plain ``name``; convert it to name_parts.
            name = iadata.pop('name', '')
            if name and not iadata.get('name_parts'):
                iadata['name_parts'] = {
                    '_legacy': name
                }
            ia = InvoiceAddress(**iadata)
        else:
            ia = None
        # Simulated orders never touch quotas, so they do not need the real lock.
        lockfn = self.context['event'].lock
        if simulate:
            lockfn = NoLockManager
        with lockfn() as now_dt:
            free_seats = set()
            seats_seen = set()
            consume_carts = validated_data.pop('consume_carts', [])
            delete_cps = []
            # quota -> [total, available]; mutated locally to account for this order.
            quota_avail_cache = {}
            v_budget = {}
            voucher_usage = Counter()
            if consume_carts:
                # Cart positions being consumed give their quota, voucher usage and
                # seats back before we check availability for the new positions.
                for cp in CartPosition.objects.filter(
                    event=self.context['event'], cart_id__in=consume_carts, expires__gt=now()
                ):
                    quotas = (cp.variation.quotas.filter(subevent=cp.subevent)
                              if cp.variation else cp.item.quotas.filter(subevent=cp.subevent))
                    for quota in quotas:
                        if quota not in quota_avail_cache:
                            quota_avail_cache[quota] = list(quota.availability())
                        if quota_avail_cache[quota][1] is not None:
                            quota_avail_cache[quota][1] += 1
                    if cp.voucher:
                        voucher_usage[cp.voucher] -= 1
                    if cp.expires > now_dt:
                        if cp.seat:
                            free_seats.add(cp.seat)
                    delete_cps.append(cp)
            # Per-position validation of vouchers and seats; errors are collected
            # per index so the client gets a parallel error structure.
            errs = [{} for p in positions_data]
            for i, pos_data in enumerate(positions_data):
                if pos_data.get('voucher'):
                    v = pos_data['voucher']
                    if pos_data.get('addon_to'):
                        errs[i]['voucher'] = ['Vouchers are currently not supported for add-on products.']
                        continue
                    if not v.applies_to(pos_data['item'], pos_data.get('variation')):
                        errs[i]['voucher'] = [error_messages['voucher_invalid_item']]
                        continue
                    if v.subevent_id and pos_data.get('subevent').pk != v.subevent_id:
                        errs[i]['voucher'] = [error_messages['voucher_invalid_subevent']]
                        continue
                    if v.valid_until is not None and v.valid_until < now_dt:
                        errs[i]['voucher'] = [error_messages['voucher_expired']]
                        continue
                    voucher_usage[v] += 1
                    if voucher_usage[v] > 0:
                        # Count redemptions blocked by other live carts (minus the
                        # ones we are about to delete).
                        redeemed_in_carts = CartPosition.objects.filter(
                            Q(voucher=pos_data['voucher']) & Q(event=self.context['event']) & Q(expires__gte=now_dt)
                        ).exclude(pk__in=[cp.pk for cp in delete_cps])
                        v_avail = v.max_usages - v.redeemed - redeemed_in_carts.count()
                        if v_avail < voucher_usage[v]:
                            errs[i]['voucher'] = [
                                'The voucher has already been used the maximum number of times.'
                            ]
                    if v.budget is not None:
                        # Budgeted voucher: the discount granted must fit into the
                        # remaining budget; otherwise the discount is reduced.
                        price = pos_data.get('price')
                        if price is None:
                            price = get_price(
                                item=pos_data.get('item'),
                                variation=pos_data.get('variation'),
                                voucher=v,
                                custom_price=None,
                                subevent=pos_data.get('subevent'),
                                addon_to=pos_data.get('addon_to'),
                                invoice_address=ia,
                            ).gross
                        # Price before voucher, used to compute the discount amount.
                        pbv = get_price(
                            item=pos_data['item'],
                            variation=pos_data.get('variation'),
                            voucher=None,
                            custom_price=None,
                            subevent=pos_data.get('subevent'),
                            addon_to=pos_data.get('addon_to'),
                            invoice_address=ia,
                        )
                        if v not in v_budget:
                            v_budget[v] = v.budget - v.budget_used()
                        disc = pbv.gross - price
                        if disc > v_budget[v]:
                            new_disc = v_budget[v]
                            v_budget[v] -= new_disc
                            if new_disc == Decimal('0.00') or pos_data.get('price') is not None:
                                errs[i]['voucher'] = [
                                    'The voucher has a remaining budget of {}, therefore a discount of {} can not be '
                                    'given.'.format(v_budget[v] + new_disc, disc)
                                ]
                                continue
                            # Raise the price so only the affordable discount remains.
                            pos_data['price'] = price + (disc - new_disc)
                        else:
                            v_budget[v] -= disc
                seated = pos_data.get('item').seat_category_mappings.filter(subevent=pos_data.get('subevent')).exists()
                if pos_data.get('seat'):
                    if not seated:
                        errs[i]['seat'] = ['The specified product does not allow to choose a seat.']
                    try:
                        seat = self.context['event'].seats.get(seat_guid=pos_data['seat'], subevent=pos_data.get('subevent'))
                    except Seat.DoesNotExist:
                        errs[i]['seat'] = ['The specified seat does not exist.']
                    else:
                        pos_data['seat'] = seat
                        # A seat freed by a consumed cart counts as available; the same
                        # seat must not be used twice within this order.
                        if (seat not in free_seats and not seat.is_available(sales_channel=validated_data.get('sales_channel', 'web'))) or seat in seats_seen:
                            errs[i]['seat'] = [gettext_lazy('The selected seat "{seat}" is not available.').format(seat=seat.name)]
                        seats_seen.add(seat)
                elif seated:
                    errs[i]['seat'] = ['The specified product requires to choose a seat.']
            if not force:
                # Quota checking pass; skipped entirely with force=True.
                for i, pos_data in enumerate(positions_data):
                    if pos_data.get('voucher'):
                        if pos_data['voucher'].allow_ignore_quota or pos_data['voucher'].block_quota:
                            # Quota-exempt or quota-blocking vouchers bypass the check.
                            continue
                    if pos_data.get('subevent'):
                        # Products/variations can be disabled per subevent.
                        if pos_data.get('item').pk in pos_data['subevent'].item_overrides and pos_data['subevent'].item_overrides[pos_data['item'].pk].disabled:
                            errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
                                str(pos_data.get('item'))
                            )]
                        if (
                            pos_data.get('variation') and pos_data['variation'].pk in pos_data['subevent'].var_overrides and
                            pos_data['subevent'].var_overrides[pos_data['variation'].pk].disabled
                        ):
                            errs[i]['item'] = [gettext_lazy('The product "{}" is not available on this date.').format(
                                str(pos_data.get('item'))
                            )]
                    new_quotas = (pos_data.get('variation').quotas.filter(subevent=pos_data.get('subevent'))
                                  if pos_data.get('variation')
                                  else pos_data.get('item').quotas.filter(subevent=pos_data.get('subevent')))
                    if len(new_quotas) == 0:
                        errs[i]['item'] = [gettext_lazy('The product "{}" is not assigned to a quota.').format(
                            str(pos_data.get('item'))
                        )]
                    else:
                        # Decrement the locally cached availability so multiple
                        # positions sharing a quota are accounted for together.
                        for quota in new_quotas:
                            if quota not in quota_avail_cache:
                                quota_avail_cache[quota] = list(quota.availability())
                            if quota_avail_cache[quota][1] is not None:
                                quota_avail_cache[quota][1] -= 1
                                if quota_avail_cache[quota][1] < 0:
                                    errs[i]['item'] = [
                                        gettext_lazy('There is not enough quota available on quota "{}" to perform the operation.').format(
                                            quota.name
                                        )
                                    ]
            if any(errs):
                raise ValidationError({'positions': errs})
            if validated_data.get('locale', None) is None:
                validated_data['locale'] = self.context['event'].settings.locale
            order = Order(event=self.context['event'], **validated_data)
            order.set_expires(subevents=[p.get('subevent') for p in positions_data])
            order.meta_info = "{}"
            order.total = Decimal('0.00')
            if simulate:
                # Preview: wrap the unsaved order so any save()/delete() fails loudly.
                order = WrappedModel(order)
                order.last_modified = now()
                order.code = 'PREVIEW'
            else:
                order.save()
            if ia:
                if not simulate:
                    ia.order = order
                    ia.save()
                else:
                    order.invoice_address = ia
                    ia.last_modified = now()
            # Build positions; pos_map maps positionid -> position so add-ons can
            # reference the base positions created earlier in the loop.
            pos_map = {}
            for pos_data in positions_data:
                answers_data = pos_data.pop('answers', [])
                addon_to = pos_data.pop('addon_to', None)
                attendee_name = pos_data.pop('attendee_name', '')
                if attendee_name and not pos_data.get('attendee_name_parts'):
                    pos_data['attendee_name_parts'] = {
                        '_legacy': attendee_name
                    }
                pos = OrderPosition(**pos_data)
                if simulate:
                    pos.order = order._wrapped
                else:
                    pos.order = order
                if addon_to:
                    if simulate:
                        pos.addon_to = pos_map[addon_to]._wrapped
                    else:
                        pos.addon_to = pos_map[addon_to]
                if pos.price is None:
                    # No explicit price given: compute it from item/voucher/subevent.
                    price = get_price(
                        item=pos.item,
                        variation=pos.variation,
                        voucher=pos.voucher,
                        custom_price=None,
                        subevent=pos.subevent,
                        addon_to=pos.addon_to,
                        invoice_address=ia,
                    )
                    pos.price = price.gross
                    pos.tax_rate = price.rate
                    pos.tax_value = price.tax
                    pos.tax_rule = pos.item.tax_rule
                else:
                    pos._calculate_tax()
                # Always record the undiscounted price for reporting purposes.
                pos.price_before_voucher = get_price(
                    item=pos.item,
                    variation=pos.variation,
                    voucher=None,
                    custom_price=None,
                    subevent=pos.subevent,
                    addon_to=pos.addon_to,
                    invoice_address=ia,
                ).gross
                if simulate:
                    pos = WrappedModel(pos)
                    pos.id = 0
                    answers = []
                    for answ_data in answers_data:
                        options = answ_data.pop('options', [])
                        answ = WrappedModel(QuestionAnswer(**answ_data))
                        answ.options = WrappedList(options)
                        answers.append(answ)
                    pos.answers = answers
                    pos.pseudonymization_id = "PREVIEW"
                else:
                    if pos.voucher:
                        # Atomic increment of the voucher redemption counter.
                        Voucher.objects.filter(pk=pos.voucher.pk).update(redeemed=F('redeemed') + 1)
                    pos.save()
                    for answ_data in answers_data:
                        options = answ_data.pop('options', [])
                        if isinstance(answ_data['answer'], File):
                            # File uploads are stored and referenced via file://<name>.
                            an = answ_data.pop('answer')
                            answ = pos.answers.create(**answ_data, answer='')
                            answ.file.save(an.name, an, save=False)
                            answ.answer = 'file://' + answ.file.name
                            answ.save()
                        else:
                            answ = pos.answers.create(**answ_data)
                            answ.options.add(*options)
                pos_map[pos.positionid] = pos
            if not simulate:
                # The consumed cart positions are now superseded by the order.
                for cp in delete_cps:
                    cp.delete()
            order.total = sum([p.price for p in pos_map.values()])
            fees = []
            for fee_data in fees_data:
                # Internal flag: interpret ``value`` as a percentage of the order total.
                is_percentage = fee_data.pop('_treat_value_as_percentage', False)
                if is_percentage:
                    fee_data['value'] = round_decimal(order.total * (fee_data['value'] / Decimal('100.00')),
                                                      self.context['event'].currency)
                is_split_taxes = fee_data.pop('_split_taxes_like_products', False)
                if is_split_taxes:
                    # Split the fee into partial fees, one per tax rule, proportionally
                    # to the net value of the products using each rule.
                    d = defaultdict(lambda: Decimal('0.00'))
                    trz = TaxRule.zero()
                    for p in pos_map.values():
                        tr = p.tax_rule
                        d[tr] += p.price - p.tax_value
                    base_values = sorted([tuple(t) for t in d.items()], key=lambda t: (t[0] or trz).rate)
                    sum_base = sum(t[1] for t in base_values)
                    fee_values = [(t[0], round_decimal(fee_data['value'] * t[1] / sum_base, self.context['event'].currency))
                                  for t in base_values]
                    sum_fee = sum(t[1] for t in fee_values)
                    # Rounding may leave a remainder; push it onto the first or last part.
                    if sum_fee > fee_data['value']:
                        fee_values[0] = (fee_values[0][0], fee_values[0][1] + (fee_data['value'] - sum_fee))
                    elif sum_fee < fee_data['value']:
                        fee_values[-1] = (fee_values[-1][0], fee_values[-1][1] + (fee_data['value'] - sum_fee))
                    for tr, val in fee_values:
                        fee_data['tax_rule'] = tr
                        fee_data['value'] = val
                        f = OrderFee(**fee_data)
                        f.order = order._wrapped if simulate else order
                        f._calculate_tax()
                        fees.append(f)
                        if not simulate:
                            f.save()
                else:
                    f = OrderFee(**fee_data)
                    f.order = order._wrapped if simulate else order
                    f._calculate_tax()
                    fees.append(f)
                    if not simulate:
                        f.save()
            order.total += sum([f.value for f in fees])
            if simulate:
                # Previews stop here: attach the in-memory relations and return.
                order.fees = fees
                order.positions = pos_map.values()
                return order
            else:
                order.save(update_fields=['total'])
        # Payment creation happens outside the booking lock.
        if order.total == Decimal('0.00') and validated_data.get('status') == Order.STATUS_PAID and not payment_provider:
            payment_provider = 'free'
        if order.total == Decimal('0.00') and validated_data.get('status') != Order.STATUS_PAID:
            # Free orders are paid automatically with the "free" provider.
            order.status = Order.STATUS_PAID
            order.save()
            order.payments.create(
                amount=order.total, provider='free', state=OrderPayment.PAYMENT_STATE_CONFIRMED,
                payment_date=now()
            )
        elif payment_provider == "free" and order.total != Decimal('0.00'):
            raise ValidationError('You cannot use the "free" payment provider for non-free orders.')
        elif validated_data.get('status') == Order.STATUS_PAID:
            if not payment_provider:
                raise ValidationError('You cannot create a paid order without a payment provider.')
            order.payments.create(
                amount=order.total,
                provider=payment_provider,
                info=payment_info,
                payment_date=payment_date,
                state=OrderPayment.PAYMENT_STATE_CONFIRMED
            )
        elif payment_provider:
            # Pending order with a pre-selected payment method.
            order.payments.create(
                amount=order.total,
                provider=payment_provider,
                info=payment_info,
                state=OrderPayment.PAYMENT_STATE_CREATED
            )
        return order
class LinePositionField(serializers.IntegerField):
    """Converts between 0-based internal invoice line positions and the
    1-based positions exposed by the API."""
    def to_representation(self, value):
        # API output is 1-based.
        return 1 + super().to_representation(value)
    def to_internal_value(self, data):
        # Internal storage is 0-based.
        return super().to_internal_value(data) - 1
class InlineInvoiceLineSerializer(I18nAwareModelSerializer):
    """Read serializer for a single line of an invoice, nested in InvoiceSerializer."""
    position = LinePositionField(read_only=True)
    class Meta:
        model = InvoiceLine
        fields = ('position', 'description', 'item', 'variation', 'attendee_name', 'event_date_from',
                  'event_date_to', 'gross_value', 'tax_value', 'tax_rate', 'tax_name')
class InvoiceSerializer(I18nAwareModelSerializer):
    """Read serializer for invoices, including all lines and address snapshots."""
    order = serializers.SlugRelatedField(slug_field='code', read_only=True)
    # For cancellations, the invoice this one refers to.
    refers = serializers.SlugRelatedField(slug_field='full_invoice_no', read_only=True)
    lines = InlineInvoiceLineSerializer(many=True)
    invoice_to_country = CountryField()
    invoice_from_country = CountryField()
    class Meta:
        model = Invoice
        fields = ('order', 'number', 'is_cancellation', 'invoice_from', 'invoice_from_name', 'invoice_from_zipcode',
                  'invoice_from_city', 'invoice_from_country', 'invoice_from_tax_id', 'invoice_from_vat_id',
                  'invoice_to', 'invoice_to_company', 'invoice_to_name', 'invoice_to_street', 'invoice_to_zipcode',
                  'invoice_to_city', 'invoice_to_state', 'invoice_to_country', 'invoice_to_vat_id', 'invoice_to_beneficiary',
                  'custom_field', 'date', 'refers', 'locale',
                  'introductory_text', 'additional_text', 'payment_provider_text', 'footer_text', 'lines',
                  'foreign_currency_display', 'foreign_currency_rate', 'foreign_currency_rate_date',
                  'internal_reference')
class OrderPaymentCreateSerializer(I18nAwareModelSerializer):
    """Write serializer for manually recording a payment on an existing order."""
    provider = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    info = CompatibleJSONField(required=False)
    class Meta:
        model = OrderPayment
        fields = ('state', 'amount', 'payment_date', 'provider', 'info')
    def create(self, validated_data):
        # The target order comes from the view context, not from the request body.
        payment = OrderPayment(order=self.context['order'], **validated_data)
        payment.save()
        return payment
class OrderRefundCreateSerializer(I18nAwareModelSerializer):
    """Serializer used to create a refund on an existing order."""
    payment = serializers.IntegerField(required=False, allow_null=True)
    provider = serializers.CharField(required=True, allow_null=False, allow_blank=False)
    info = CompatibleJSONField(required=False)

    class Meta:
        model = OrderRefund
        fields = ('state', 'source', 'amount', 'payment', 'execution_date',
                  'provider', 'info', 'comment')

    def create(self, validated_data):
        # Resolve the optional local payment id to a payment of this order.
        payment_id = validated_data.pop('payment', None)
        payment = None
        if payment_id:
            try:
                payment = self.context['order'].payments.get(local_id=payment_id)
            except OrderPayment.DoesNotExist:
                raise ValidationError('Unknown payment ID.')
        refund = OrderRefund(order=self.context['order'], payment=payment,
                             **validated_data)
        refund.save()
        return refund
class RevokedTicketSecretSerializer(I18nAwareModelSerializer):
    """Representation of a revoked ticket secret."""
    class Meta:
        model = RevokedTicketSecret
        fields = ('id', 'secret', 'created')
| true | true |
1c3822e9820d11b876c263fd47b55ca874718491 | 876 | py | Python | tinypipe/utils/general.py | dave-msk/tinypipe | 0be1c3cafb2a1cb2f672e4688e84889fa52dad30 | [
"Apache-2.0"
] | null | null | null | tinypipe/utils/general.py | dave-msk/tinypipe | 0be1c3cafb2a1cb2f672e4688e84889fa52dad30 | [
"Apache-2.0"
] | null | null | null | tinypipe/utils/general.py | dave-msk/tinypipe | 0be1c3cafb2a1cb2f672e4688e84889fa52dad30 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Siu-Kei Muk (David). All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def val_or_default(val, default):
    """Return ``val`` if it is not None, otherwise return ``default``.

    Falsy-but-set values (0, '', False) are returned unchanged; only an
    explicit None triggers the fallback.
    """
    if val is None:
        return default
    return val
| 39.818182 | 80 | 0.705479 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
def val_or_default(val, default):
return val if val is not None else default
| true | true |
1c3823c76dbf00f9fce4c4498a7bed366e31e3ef | 90 | py | Python | cms/__init__.py | fp4code/django-cms | e382fc7fe039733fee187658066a7b4af656c49e | [
"BSD-3-Clause"
] | 1 | 2019-09-14T09:45:12.000Z | 2019-09-14T09:45:12.000Z | cms/__init__.py | zahedbri/django-cms | 70c11c90d14459ef63472f3d0e6c90086c6098c5 | [
"BSD-3-Clause"
] | null | null | null | cms/__init__.py | zahedbri/django-cms | 70c11c90d14459ef63472f3d0e6c90086c6098c5 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Package version string (PEP 440).
__version__ = '3.4.5'
# Dotted path to the AppConfig Django loads by default for this app.
default_app_config = 'cms.apps.CMSConfig'
| 15 | 41 | 0.644444 |
__version__ = '3.4.5'
default_app_config = 'cms.apps.CMSConfig'
| true | true |
1c3823cc1f57943e9f990aa7d2ac42167d0e0e23 | 4,779 | py | Python | composition_root.py | plievana/python-ddd | 44b1932344260b33ee0cce538c0be0b56b21327d | [
"MIT"
] | null | null | null | composition_root.py | plievana/python-ddd | 44b1932344260b33ee0cce538c0be0b56b21327d | [
"MIT"
] | null | null | null | composition_root.py | plievana/python-ddd | 44b1932344260b33ee0cce538c0be0b56b21327d | [
"MIT"
] | null | null | null | import dependency_injector.containers as containers
import dependency_injector.providers as providers
from application.command_bus import CommandBus
from application.command_handlers import AddItemCommandHandler
from application.query_bus import QueryBus
from application.query_handlers import GetItemsQueryHandler
from application.services import IdentityHashingService
from infrastructure.framework.falcon.controllers import (InfoController,
ItemsController)
from infrastructure.framework.flask.controllers import (InfoController as FlaskInfoController,
ItemsController as FlaskItemsController)
from infrastructure.repositories.auction_items_repository import AuctionItemsRepository
from infrastructure.repositories.users_repository import InMemoryUsersRepository
from infrastructure.framework.falcon.authentication import BasicAuthenticationService
from infrastructure.framework.flask.authentication import BasicAuthenticationService as FlaskBasicAuthenticationService
class ObjectiveCommandHandler():
    """Class-based demo handler: stores a logger and reports handled commands."""

    def __init__(self, logger):
        # Keep a reference so handle() can show which logger was injected.
        self.logger = logger

    def handle(self, command):
        print(f'objective handler is handling {command} {self.logger}')
def functional_handler(logger):
    """Factory returning a closure-based demo handler bound to *logger*."""
    return lambda command: print('functional handler is handling', command, logger)
class BaseContainer(containers.DeclarativeContainer):
    """Shared DI wiring: hashing service and per-framework authentication."""
    # One hashing service instance is shared by every consumer.
    hashing_service_factory = providers.Singleton(IdentityHashingService)
    falcon_authentication_service_factory = providers.Factory(
        BasicAuthenticationService,
        users_repository=providers.Factory(
            InMemoryUsersRepository,
            hashing_service=hashing_service_factory,
        ),
    )
    flask_authentication_service_factory = providers.Factory(
        FlaskBasicAuthenticationService,
        users_repository=providers.Factory(
            InMemoryUsersRepository,
            hashing_service=hashing_service_factory,
        ),
    )
class CommandBusContainer(containers.DeclarativeContainer):
    """DI wiring of command handlers behind the command bus."""
    items_repository = providers.Singleton(AuctionItemsRepository)
    # The aggregate dispatches by command class name.
    command_handler_factory = providers.FactoryAggregate(
        AddItemCommand=providers.Factory(
            AddItemCommandHandler,
            items_repository=items_repository,
        ),
    )
    command_bus_factory = providers.Factory(
        CommandBus,
        command_handler_factory=providers.DelegatedFactory(command_handler_factory),
    )
class QueryBusContainer(containers.DeclarativeContainer):
    """DI wiring of query handlers behind the query bus."""
    items_repository = providers.Singleton(AuctionItemsRepository)
    # The aggregate dispatches by query class name.
    query_handler_factory = providers.FactoryAggregate(
        GetItemsQuery=providers.Factory(
            GetItemsQueryHandler,
            items_repository=items_repository,
        ),
    )
    query_bus_factory = providers.Factory(
        QueryBus,
        query_handler_factory=providers.DelegatedFactory(query_handler_factory),
    )
class FalconContainer(containers.DeclarativeContainer):
    """Controller factories for the Falcon framework."""
    items_controller_factory = providers.Factory(
        ItemsController,
        command_bus=CommandBusContainer.command_bus_factory,
        query_bus=QueryBusContainer.query_bus_factory,
        authentication_service=BaseContainer.falcon_authentication_service_factory,
    )
    info_controller_factory = providers.Factory(
        InfoController,
        authentication_service=BaseContainer.falcon_authentication_service_factory,
    )
class FlaskContainer(containers.DeclarativeContainer):
info_controller_factory = providers.Factory(FlaskInfoController,
authentication_service=BaseContainer.flask_authentication_service_factory
)
items_controller_factory = providers.Factory(FlaskItemsController,
command_bus=CommandBusContainer.command_bus_factory,
query_bus=QueryBusContainer.query_bus_factory,
authentication_service=BaseContainer.flask_authentication_service_factory,
) | 51.387097 | 162 | 0.642394 | import dependency_injector.containers as containers
import dependency_injector.providers as providers
from application.command_bus import CommandBus
from application.command_handlers import AddItemCommandHandler
from application.query_bus import QueryBus
from application.query_handlers import GetItemsQueryHandler
from application.services import IdentityHashingService
from infrastructure.framework.falcon.controllers import (InfoController,
ItemsController)
from infrastructure.framework.flask.controllers import (InfoController as FlaskInfoController,
ItemsController as FlaskItemsController)
from infrastructure.repositories.auction_items_repository import AuctionItemsRepository
from infrastructure.repositories.users_repository import InMemoryUsersRepository
from infrastructure.framework.falcon.authentication import BasicAuthenticationService
from infrastructure.framework.flask.authentication import BasicAuthenticationService as FlaskBasicAuthenticationService
class ObjectiveCommandHandler():
def __init__(self, logger):
self.logger = logger
def handle(self, command):
print('objective handler is handling', command, self.logger)
def functional_handler(logger):
def handle(command):
print('functional handler is handling', command, logger)
return handle
class BaseContainer(containers.DeclarativeContainer):
hashing_service_factory = providers.Singleton(IdentityHashingService)
falcon_authentication_service_factory = providers.Factory(BasicAuthenticationService,
users_repository=providers.Factory(InMemoryUsersRepository, hashing_service=hashing_service_factory)
)
flask_authentication_service_factory = providers.Factory(FlaskBasicAuthenticationService,
users_repository=providers.Factory(
InMemoryUsersRepository,
hashing_service=hashing_service_factory)
)
class CommandBusContainer(containers.DeclarativeContainer):
items_repository = providers.Singleton(AuctionItemsRepository)
command_handler_factory = providers.FactoryAggregate(
AddItemCommand=providers.Factory(AddItemCommandHandler,
items_repository=items_repository
)
)
command_bus_factory = providers.Factory(
CommandBus,
command_handler_factory=providers.DelegatedFactory(
command_handler_factory)
)
class QueryBusContainer(containers.DeclarativeContainer):
items_repository = providers.Singleton(AuctionItemsRepository)
query_handler_factory = providers.FactoryAggregate(
GetItemsQuery=providers.Factory(
GetItemsQueryHandler, items_repository=items_repository)
)
query_bus_factory = providers.Factory(
QueryBus, query_handler_factory=providers.DelegatedFactory(query_handler_factory))
class FalconContainer(containers.DeclarativeContainer):
items_controller_factory = providers.Factory(ItemsController,
command_bus=CommandBusContainer.command_bus_factory,
query_bus=QueryBusContainer.query_bus_factory,
authentication_service=BaseContainer.falcon_authentication_service_factory,
)
info_controller_factory = providers.Factory(InfoController,
authentication_service=BaseContainer.falcon_authentication_service_factory
)
class FlaskContainer(containers.DeclarativeContainer):
info_controller_factory = providers.Factory(FlaskInfoController,
authentication_service=BaseContainer.flask_authentication_service_factory
)
items_controller_factory = providers.Factory(FlaskItemsController,
command_bus=CommandBusContainer.command_bus_factory,
query_bus=QueryBusContainer.query_bus_factory,
authentication_service=BaseContainer.flask_authentication_service_factory,
) | true | true |
1c3823cef3b0698023be0f415936f0db8a9d85de | 2,198 | py | Python | qpricesim/model_code/economic_environment.py | ToFeWe/qpricesim | 2d4312ed1d1356449f0c168835a0662b238a27bb | [
"MIT"
] | 2 | 2022-03-22T12:16:37.000Z | 2022-03-22T12:48:46.000Z | qpricesim/model_code/economic_environment.py | ToFeWe/qpricesim | 2d4312ed1d1356449f0c168835a0662b238a27bb | [
"MIT"
] | null | null | null | qpricesim/model_code/economic_environment.py | ToFeWe/qpricesim | 2d4312ed1d1356449f0c168835a0662b238a27bb | [
"MIT"
] | null | null | null | """
A module that defines the economic environment the agents are interacting in.
"""
import numpy as np
from numba import njit
@njit
def calc_winning_price(all_prices):
    """Return the market price and how many firms posted it.

    The lowest posted price wins a homogeneous-goods Bertrand market.

    Args:
        all_prices (array): all prices posted in the market this round.

    Returns:
        tuple: (winning_price, n_winning_price), the market price and
        the number of firms that posted it.
    """
    lowest = np.min(all_prices)
    winner_flags = np.where(all_prices == lowest, 1, 0)
    return lowest, np.sum(winner_flags)
@njit
def calc_reward(p_i, winning_price, n_winning_price, reservation_price, m_consumer):
    """Profit of a single firm in a homogeneous-goods Bertrand market.

    Use calc_winning_price() to obtain winning_price and n_winning_price
    from the round's posted prices first.

    Args:
        p_i (integer): price this agent posted in the round.
        winning_price (integer): market price.
        n_winning_price (integer): number of firms at the market price.
        reservation_price (integer): maximum price consumers will pay.
        m_consumer (integer): number of consumers in the market.

    Returns:
        float: the agent's profit for the round. A firm earns nothing
        when it prices above the reservation price or away from the
        market price; firms at the market price split demand equally.
    """
    priced_out = p_i > reservation_price or p_i != winning_price
    if priced_out:
        return 0
    return (1 / n_winning_price) * p_i * m_consumer
| 32.80597 | 86 | 0.696087 | import numpy as np
from numba import njit
@njit
def calc_winning_price(all_prices):
winning_price = np.min(all_prices)
n_winning_price = np.sum(np.where(all_prices == winning_price, 1, 0))
return winning_price, n_winning_price
@njit
def calc_reward(p_i, winning_price, n_winning_price, reservation_price, m_consumer):
if p_i > reservation_price:
return 0
elif p_i == winning_price:
return (1 / n_winning_price) * p_i * m_consumer
else:
return 0
| true | true |
1c3824dda1e45856c1c591405392e5f8ca1dd8b1 | 8,686 | py | Python | dosna/backends/s3.py | ijjorama/DosNa | 8e5322a0e1b93a377a9a443d442253b45957dac2 | [
"Apache-2.0"
] | null | null | null | dosna/backends/s3.py | ijjorama/DosNa | 8e5322a0e1b93a377a9a443d442253b45957dac2 | [
"Apache-2.0"
] | null | null | null | dosna/backends/s3.py | ijjorama/DosNa | 8e5322a0e1b93a377a9a443d442253b45957dac2 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""Backend s3 uses a S3 interface to store the dataset and chunks data"""
import logging
import numpy as np
import boto3
from botocore.exceptions import ClientError
from dosna.backends import Backend
from dosna.backends.base import (BackendConnection, BackendDataChunk,
BackendDataset, ConnectionError,
DatasetNotFoundError)
from dosna.util import dtype2str, shape2str, str2shape
from dosna.util.data import slices2shape
_DATASET_ROOT = 'dataset_root'
_SIGNATURE = "DosNa Dataset"
_SHAPE = 'shape'
_DTYPE = 'dtype'
_FILLVALUE = 'fillvalue'
_CHUNK_GRID = 'chunk-grid'
_CHUNK_SIZE = 'chunk-size'
log = logging.getLogger(__name__)
# Sanitise bucket name to conform to AWS conventions
def bucketName(name):
    """Return *name* sanitised into an S3-safe bucket name.

    AWS bucket names must be lowercase and may not contain underscores,
    so underscores become hyphens and the result is lowercased.
    """
    return name.lower().replace('_', '-')
class S3Connection(BackendConnection):
    """
    An S3 connection that wraps a boto3 S3 client.

    Each dataset maps to one bucket. The bucket holds a special
    ``dataset_root`` object whose body is the DosNa signature and whose
    S3 metadata describes the dataset (shape, dtype, fill value and
    chunking).
    """

    def __init__(self, name, endpoint_url=None, verify=True,
                 profile_name='default',
                 *args, **kwargs):
        """
        :param name: connection name.
        :param endpoint_url: S3 endpoint URL; None uses the boto3 default.
        :param verify: whether to verify TLS certificates.
        :param profile_name: boto3 credentials profile to use.
        """
        # Fix: the parent constructor was previously invoked twice;
        # once is sufficient (and the second call re-ran base setup).
        super(S3Connection, self).__init__(name, *args, **kwargs)
        self._endpoint_url = endpoint_url
        self._verify = verify
        self._client = None
        self._profile_name = profile_name

    def connect(self):
        """Open the boto3 S3 client for this connection.

        :raises ConnectionError: if the connection is already open.
        """
        if self.connected:
            raise ConnectionError(
                'Connection {} is already open'.format(self.name))
        session = boto3.session.Session(profile_name=self._profile_name)
        self._client = session.client(
            service_name='s3',
            endpoint_url=self._endpoint_url,
            verify=self._verify
        )
        super(S3Connection, self).connect()

    def disconnect(self):
        """Close the connection; no-op when not connected."""
        if self.connected:
            super(S3Connection, self).disconnect()

    @property
    def client(self):
        """The underlying boto3 S3 client (None until connect())."""
        return self._client

    def create_dataset(self, name, shape=None, dtype=np.float32, fillvalue=0,
                       data=None, chunk_size=None):
        """
        Create a dataset backed by a new S3 bucket.

        Either ``shape`` and ``dtype`` or ``data`` must be given; when
        ``data`` is given its shape and dtype take precedence.
        ``chunk_size`` defaults to the full shape (a single chunk).

        :returns: the new S3Dataset, or None if bucket creation failed.
        """
        if not ((shape is not None and dtype is not None) or data is not None):
            raise Exception('Provide `shape` and `dtype` or `data`')
        if self.has_dataset(name):
            raise Exception('Dataset `%s` already exists' % name)
        if data is not None:
            shape = data.shape
            dtype = data.dtype
        if chunk_size is None:
            chunk_size = shape
        chunk_grid = (np.ceil(np.asarray(shape, float) / chunk_size))\
            .astype(int)
        name = bucketName(name)
        log.debug('creating dataset %s with shape:%s chunk_size:%s '
                  'chunk_grid:%s', name, shape, chunk_size, chunk_grid)
        try:
            self._client.create_bucket(Bucket=name, ACL='private')
        except ClientError as e:
            code = e.response['Error']['Code']
            if code is not None:
                log.error('connect: create_bucket returns %s', code)
                return None
        metadata = {
            _SHAPE: shape2str(shape),
            _DTYPE: dtype2str(dtype),
            _FILLVALUE: repr(fillvalue),
            _CHUNK_GRID: shape2str(chunk_grid),
            _CHUNK_SIZE: shape2str(chunk_size)
        }
        # Fix: bytes(_SIGNATURE) raises TypeError on Python 3 (a str needs
        # an explicit encoding); encode the signature instead.
        self._client.put_object(
            Bucket=name, Key=_DATASET_ROOT,
            Body=_SIGNATURE.encode('utf-8'), Metadata=metadata
        )
        dataset = S3Dataset(
            self, name, shape, dtype,
            fillvalue, chunk_grid, chunk_size
        )
        return dataset

    def get_dataset(self, name):
        """
        Return an S3Dataset handle for an existing dataset.

        :raises DatasetNotFoundError: if the bucket or its DosNa
            metadata is missing.
        """
        if not self.has_dataset(name):
            raise DatasetNotFoundError('Dataset `%s` does not exist' % name)
        # has_dataset() caches the fetched root object on self._dataset_root.
        metadata = self._dataset_root['Metadata']
        if metadata is None:
            raise DatasetNotFoundError(
                'Dataset `%s` does not have required DosNa metadata' % name
            )
        shape = str2shape(metadata[_SHAPE])
        dtype = metadata[_DTYPE]
        fillvalue = int(metadata[_FILLVALUE])
        chunk_grid = str2shape(metadata[_CHUNK_GRID])
        chunk_size = str2shape(metadata[_CHUNK_SIZE])
        dataset = S3Dataset(
            self, name, shape, dtype, fillvalue,
            chunk_grid, chunk_size
        )
        return dataset

    def get_dataset_root(self, name):
        """Fetch the dataset-root object for *name*.

        :returns: the get_object response dict, or None when the bucket,
            key or DosNa signature is absent (best-effort check).
        """
        name = bucketName(name)
        dataset_root = None
        try:
            dataset_root = self._client.get_object(
                Bucket=name, Key=_DATASET_ROOT
            )
            content = dataset_root['Body'].read()
            # Fix: get_object bodies are bytes; comparing them to the str
            # signature is always False on Python 3, so compare encoded.
            if content != _SIGNATURE.encode('utf-8'):
                dataset_root = None
        except Exception as e:
            # Best effort: a missing bucket/key simply means "no dataset".
            # Fix: also reset dataset_root so a failure part-way through
            # (e.g. during Body.read()) cannot leak a partial response.
            log.debug('get_dataset_root(%s): %s', name, e)
            dataset_root = None
        return dataset_root

    def has_dataset(self, name):
        """Return True iff *name* is a DosNa dataset; caches the root object."""
        self._dataset_root = self.get_dataset_root(name)
        if self._dataset_root is None:
            log.info("has_dataset: dataset %s does not exist", name)
        return self._dataset_root is not None

    def del_dataset(self, name):
        """Delete the dataset's root object and its bucket.

        :raises DatasetNotFoundError: if the dataset does not exist.
        """
        if not self.has_dataset(name):
            raise DatasetNotFoundError(
                'Dataset `{}` does not exist'.format(name))
        name = bucketName(name)
        try:
            self._client.delete_object(Bucket=name, Key=_DATASET_ROOT)
            self._client.delete_bucket(Bucket=name)
        except ClientError as e:
            log.error('del_dataset: cannot delete %s: %s',
                      name, e.response['Error'])
class S3Dataset(BackendDataset):
    """Dataset stored as one S3 bucket; each chunk is an object keyed by
    its grid index (e.g. key ``'1.2'`` for index ``(1, 2)``)."""

    @property
    def client(self):
        # Reuse the boto3 client owned by the parent connection.
        return self.connection.client

    def _idx2name(self, idx):
        """Map a chunk grid index tuple to its S3 object key."""
        return '.'.join(str(part) for part in idx)

    def create_chunk(self, idx, data=None, slices=None):
        """Create and write the chunk at *idx*; fails if it already exists."""
        if self.has_chunk(idx):
            raise Exception(
                'DataChunk `{}{}` already exists'.format(self.name, idx))
        chunk = S3DataChunk(self, idx, self._idx2name(idx), self.chunk_size,
                            self.dtype, self.fillvalue)
        if data is None:
            # No payload given: initialise the whole chunk to the fill value.
            data = np.full(self.chunk_size, self.fillvalue, self.dtype)
        chunk.set_data(data, slices, fill_others=True)
        return chunk

    def get_chunk(self, idx):
        """Return the chunk at *idx*, creating it lazily when missing."""
        if not self.has_chunk(idx):
            return self.create_chunk(idx)
        return S3DataChunk(self, idx, self._idx2name(idx), self.chunk_size,
                           self.dtype, self.fillvalue)

    def has_chunk(self, idx):
        """Return True iff the chunk object for *idx* exists in the bucket."""
        try:
            self.client.head_object(Bucket=bucketName(self._name),
                                    Key=self._idx2name(idx))
        except ClientError as e:
            log.debug("ClientError: %s", e.response['Error']['Code'])
            return False
        return True

    def del_chunk(self, idx):
        """Delete the chunk object at *idx* if it exists."""
        if self.has_chunk(idx):
            self.client.delete_object(Bucket=bucketName(self._name),
                                      Key=self._idx2name(idx))
class S3DataChunk(BackendDataChunk):
    """A single chunk of a dataset, stored as one S3 object."""

    @property
    def client(self):
        """boto3 client of the owning dataset's connection."""
        return self.dataset.client

    def get_data(self, slices=None):
        """
        Read this chunk from S3 and return it as a numpy array.

        :param slices: optional selection applied to the chunk array.
        """
        if slices is None:
            slices = slice(None)
        # Fix: np.fromstring is deprecated for binary data (numpy >= 1.14);
        # np.frombuffer is the replacement. frombuffer returns a read-only
        # view over the bytes, so copy to keep the array writable —
        # set_data() mutates the result of get_data() in place.
        data = np.frombuffer(self.read(), dtype=self.dtype,
                             count=self.size).copy()
        data.shape = self.shape
        return data[slices]

    def set_data(self, values, slices=None, fill_others=False):
        """
        Write *values* into this chunk.

        With no slices (or a full-chunk selection) the bytes are written
        directly; otherwise a read-modify-write is performed, starting
        from the fill value when *fill_others* is True.
        """
        if slices is None or slices2shape(slices) == self.shape:
            self.write_full(values.tobytes())
        else:
            if fill_others:
                cdata = np.full(self.shape, self.fillvalue, self.dtype)
            else:
                cdata = self.get_data()
            cdata[slices] = values
            self.write_full(cdata.tobytes())

    def write_full(self, data):
        """Overwrite the whole chunk object with *data* (bytes)."""
        self.client.put_object(
            Bucket=bucketName(self.dataset.name), Key=self.name, Body=data
        )

    def read(self, length=None, offset=0):
        """Read *length* bytes starting at *offset*; defaults to the whole object."""
        if length is None:
            length = self.byte_count
        byte_range = 'bytes={}-{}'.format(offset, offset + length - 1)
        return self.client.get_object(
            Bucket=bucketName(self.dataset.name),
            Key=self.name,
            Range=byte_range
        )['Body'].read()
_backend = Backend('s3', S3Connection, S3Dataset, S3DataChunk)
| 29.544218 | 79 | 0.590145 |
import logging
import numpy as np
import boto3
from botocore.exceptions import ClientError
from dosna.backends import Backend
from dosna.backends.base import (BackendConnection, BackendDataChunk,
BackendDataset, ConnectionError,
DatasetNotFoundError)
from dosna.util import dtype2str, shape2str, str2shape
from dosna.util.data import slices2shape
_DATASET_ROOT = 'dataset_root'
_SIGNATURE = "DosNa Dataset"
_SHAPE = 'shape'
_DTYPE = 'dtype'
_FILLVALUE = 'fillvalue'
_CHUNK_GRID = 'chunk-grid'
_CHUNK_SIZE = 'chunk-size'
log = logging.getLogger(__name__)
def bucketName(name):
return name.replace('_', '-').lower()
class S3Connection(BackendConnection):
def __init__(self, name, endpoint_url=None, verify=True,
profile_name='default',
*args, **kwargs):
super(S3Connection, self).__init__(name, *args, **kwargs)
self._endpoint_url = endpoint_url
self._verify = verify
self._client = None
self._profile_name = profile_name
super(S3Connection, self).__init__(name, *args, **kwargs)
def connect(self):
if self.connected:
raise ConnectionError(
'Connection {} is already open'.format(self.name))
session = boto3.session.Session(profile_name=self._profile_name)
self._client = session.client(
service_name='s3',
endpoint_url=self._endpoint_url,
verify=self._verify
)
super(S3Connection, self).connect()
def disconnect(self):
if self.connected:
super(S3Connection, self).disconnect()
@property
def client(self):
return self._client
def create_dataset(self, name, shape=None, dtype=np.float32, fillvalue=0,
data=None, chunk_size=None):
if not ((shape is not None and dtype is not None) or data is not None):
raise Exception('Provide `shape` and `dtype` or `data`')
if self.has_dataset(name):
raise Exception('Dataset `%s` already exists' % name)
if data is not None:
shape = data.shape
dtype = data.dtype
if chunk_size is None:
chunk_size = shape
chunk_grid = (np.ceil(np.asarray(shape, float) / chunk_size))\
.astype(int)
name = bucketName(name)
log.debug('creating dataset %s with shape:%s chunk_size:%s '
'chunk_grid:%s', name, shape, chunk_size, chunk_grid)
try:
self._client.create_bucket(Bucket=name, ACL='private')
except ClientError as e:
code = e.response['Error']['Code']
if code is not None:
log.error('connect: create_bucket returns %s', code)
return None
metadata = {
_SHAPE: shape2str(shape),
_DTYPE: dtype2str(dtype),
_FILLVALUE: repr(fillvalue),
_CHUNK_GRID: shape2str(chunk_grid),
_CHUNK_SIZE: shape2str(chunk_size)
}
self._client.put_object(
Bucket=name, Key=_DATASET_ROOT,
Body=bytes(_SIGNATURE), Metadata=metadata
)
dataset = S3Dataset(
self, name, shape, dtype,
fillvalue, chunk_grid, chunk_size
)
return dataset
def get_dataset(self, name):
if not self.has_dataset(name):
raise DatasetNotFoundError('Dataset `%s` does not exist' % name)
metadata = self._dataset_root['Metadata']
if metadata is None:
raise DatasetNotFoundError(
'Dataset `%s` does not have required DosNa metadata' % name
)
shape = str2shape(metadata[_SHAPE])
dtype = metadata[_DTYPE]
fillvalue = int(metadata[_FILLVALUE])
chunk_grid = str2shape(metadata[_CHUNK_GRID])
chunk_size = str2shape(metadata[_CHUNK_SIZE])
dataset = S3Dataset(
self, name, shape, dtype, fillvalue,
chunk_grid, chunk_size
)
return dataset
def get_dataset_root(self, name):
name = bucketName(name)
dataset_root = None
try:
dataset_root = self._client.get_object(
Bucket=name, Key=_DATASET_ROOT
)
content = dataset_root['Body'].read()
if not content == _SIGNATURE:
dataset_root = None
except Exception:
pass
return dataset_root
def has_dataset(self, name):
self._dataset_root = self.get_dataset_root(name)
if self._dataset_root is None:
log.info("has_dataset: dataset %s does not exist", name)
return self._dataset_root is not None
def del_dataset(self, name):
if self.has_dataset(name):
name = bucketName(name)
try:
self._client.delete_object(Bucket=name, Key=_DATASET_ROOT)
self._client.delete_bucket(Bucket=name)
except ClientError as e:
log.error('del_dataset: cannot delete %s: %s',
name, e.response['Error'])
else:
raise DatasetNotFoundError(
'Dataset `{}` does not exist'.format(name))
class S3Dataset(BackendDataset):
@property
def client(self):
return self.connection.client
def _idx2name(self, idx):
return '.'.join(map(str, idx))
def create_chunk(self, idx, data=None, slices=None):
if self.has_chunk(idx):
raise Exception('DataChunk `{}{}` already exists'.
format(self.name, idx))
name = self._idx2name(idx)
# print "Name = %s" % (name)
dtype = self.dtype
shape = self.chunk_size
fillvalue = self.fillvalue
datachunk = S3DataChunk(self, idx, name, shape, dtype, fillvalue)
if data is None:
data = np.full(shape, fillvalue, dtype)
datachunk.set_data(data, slices, fill_others=True)
return datachunk
def get_chunk(self, idx):
if self.has_chunk(idx):
name = self._idx2name(idx)
dtype = self.dtype
shape = self.chunk_size
fillvalue = self.fillvalue
return S3DataChunk(self, idx, name, shape, dtype, fillvalue)
return self.create_chunk(idx)
def has_chunk(self, idx):
has_chunk = False
name = self._idx2name(idx)
try:
self.client.head_object(Bucket=bucketName(self._name), Key=name)
has_chunk = True
except ClientError as e:
log.debug("ClientError: %s", e.response['Error']['Code'])
return has_chunk
def del_chunk(self, idx):
if self.has_chunk(idx):
self.client.delete_object(
Bucket=bucketName(self._name),
Key=self._idx2name(idx)
)
class S3DataChunk(BackendDataChunk):
@property
def client(self):
return self.dataset.client
def get_data(self, slices=None):
if slices is None:
slices = slice(None)
data = np.fromstring(self.read(), dtype=self.dtype, count=self.size)
data.shape = self.shape
return data[slices]
def set_data(self, values, slices=None, fill_others=False):
if slices is None or slices2shape(slices) == self.shape:
self.write_full(values.tobytes())
else:
if fill_others:
cdata = np.full(self.shape, self.fillvalue, self.dtype)
else:
cdata = self.get_data()
cdata[slices] = values
self.write_full(cdata.tobytes())
def write_full(self, data):
self.client.put_object(
Bucket=bucketName(self.dataset.name), Key=self.name, Body=data
)
def read(self, length=None, offset=0):
if length is None:
length = self.byte_count
byteRange = 'bytes={}-{}'.format(offset, offset+length-1)
return self.client.get_object(
Bucket=bucketName(self.dataset.name),
Key=self.name,
Range=byteRange
)['Body'].read()
_backend = Backend('s3', S3Connection, S3Dataset, S3DataChunk)
| true | true |
1c3825a7c22eda2611ef396149cf33214f4ab6ae | 8,509 | py | Python | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/18-2Nested_false-termination_5.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/18-2Nested_false-termination_5.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/f3_wrong_hints_permutations/scaling_software_termination/18-2Nested_false-termination_5.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z | from typing import Tuple, FrozenSet
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
import pysmt.typing as types
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode, FNode]:
    """Build the symbolic transition system of the benchmark program.

    Encodes a 3-location loop over integer counters x and y: location 0
    tests ``x >= 0`` (exiting to pc = -1 otherwise), location 1 performs
    ``x += y`` and location 2 performs ``y += 1``.

    :returns: (symbols, init, trans, fairness) as pysmt formulas, where
        fairness asserts the program has not reached the exit pc (-1).
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    pc = mgr.Symbol("pc", types.INT)
    x = mgr.Symbol("x", types.INT)
    y = mgr.Symbol("y", types.INT)
    x_pc = symb_to_next(mgr, pc)
    x_x = symb_to_next(mgr, x)
    x_y = symb_to_next(mgr, y)
    symbols = frozenset([pc, x, y])

    m_1 = mgr.Int(-1)
    n_locs = 3
    max_int = n_locs
    ints = [mgr.Int(idx) for idx in range(max_int)]
    pcs = [mgr.Equals(pc, num) for num in ints[:n_locs]]
    x_pcs = [mgr.Equals(x_pc, num) for num in ints[:n_locs]]
    pcend = mgr.Equals(pc, m_1)
    x_pcend = mgr.Equals(x_pc, m_1)

    init = pcs[0]

    # Control-flow edges of the program graph.
    cond = mgr.GE(x, ints[0])
    cfg = [
        # pc = 0 & (x >= 0) -> pc' = 1
        mgr.Implies(mgr.And(pcs[0], cond), x_pcs[1]),
        # pc = 0 & !(x >= 0) -> pc' = -1
        mgr.Implies(mgr.And(pcs[0], mgr.Not(cond)), x_pcend),
        # pc = 1 -> pc' = 2
        mgr.Implies(pcs[1], x_pcs[2]),
        # pc = 2 -> pc' = 0
        mgr.Implies(pcs[2], x_pcs[0]),
        # pc = -1 -> pc' = -1
        mgr.Implies(pcend, x_pcend),
    ]

    # Data updates attached to each location.
    same_x = mgr.Equals(x_x, x)
    same_y = mgr.Equals(x_y, y)
    same = mgr.And(same_x, same_y)
    updates = [
        # pc = 0 -> frame: nothing changes
        mgr.Implies(pcs[0], same),
        # pc = 1 -> x' = x + y, y unchanged
        mgr.Implies(pcs[1],
                    mgr.And(mgr.Equals(x_x, mgr.Plus(x, y)), same_y)),
        # pc = 2 -> x unchanged, y' = y + 1
        mgr.Implies(pcs[2],
                    mgr.And(same_x, mgr.Equals(x_y, mgr.Plus(y, ints[1])))),
        # pc = -1 -> frame: nothing changes
        mgr.Implies(pcend, same),
    ]

    trans = mgr.And(*cfg, *updates)
    fairness = mgr.Not(mgr.Equals(pc, m_1))
    return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
    """Return the set of candidate Hints for the search procedure.

    Each Hint is a small automaton of Locations over a subset of the
    system symbols (pc, x or y); ``set_progress(i, formula)`` adds an
    edge to location *i* labelled with a constraint on the next-state
    symbol.  The hints are intentionally of mixed quality: some are
    helpful, some are misleading (benchmark material).

    :param env: pysmt environment whose formula manager owns all terms.
    """
    assert isinstance(env, PysmtEnv)
    mgr = env.formula_manager
    pc = mgr.Symbol("pc", types.INT)
    x = mgr.Symbol("x", types.INT)
    y = mgr.Symbol("y", types.INT)
    symbs = frozenset([pc, x, y])

    # Integer constants shared by the hint formulae.
    m_100 = mgr.Int(-100)
    m_1 = mgr.Int(-1)
    i_0 = mgr.Int(0)
    i_1 = mgr.Int(1)
    i_2 = mgr.Int(2)
    i_4 = mgr.Int(4)
    i_20 = mgr.Int(20)

    # Next-state versions of the symbols.
    x_pc = symb_to_next(mgr, pc)
    x_x = symb_to_next(mgr, x)
    x_y = symb_to_next(mgr, y)
    res = []

    # h_pc2: pc == 1 -> pc' > pc; pc >= 2 -> pc' = pc/pc (i.e. 1).
    loc0 = Location(env, mgr.Equals(pc, i_1))
    loc0.set_progress(1, mgr.GT(x_pc, pc))
    loc1 = Location(env, mgr.GE(pc, i_2))
    loc1.set_progress(0, mgr.Equals(x_pc, mgr.Div(pc, pc)))
    h_pc = Hint("h_pc2", env, frozenset([pc]), symbs)
    h_pc.set_locs([loc0, loc1])
    res.append(h_pc)

    # h_y1: single self-loop, y' = x + y; may stutter with y' = y.
    stutter = mgr.Equals(x_y, y)
    loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(x, y)))
    h_y = Hint("h_y1", env, frozenset([y]), symbs)
    h_y.set_locs([loc])
    res.append(h_y)

    # h_x5: alternate x' = x*x with x' < -x*x.
    loc0 = Location(env, mgr.LE(x, i_0))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, x)))
    loc1 = Location(env, mgr.GE(x, i_0))
    loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
    h_x = Hint("h_x5", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)

    # h_x4: three locations; x grows via x*y, then x' > y, then x' >= 20.
    loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, y)))
    loc1 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc1.set_progress(2, mgr.GT(x_x, y))
    loc2 = Location(env, mgr.GE(x, i_2))
    loc2.set_progress(0, mgr.GE(x_x, i_20))
    h_x = Hint("h_x4", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1, loc2])
    res.append(h_x)

    # h_y4: y' = x*y, then y' >= 20, then y' back into [-100, 0].
    loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
    loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(x, y)))
    loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
    loc1.set_progress(2, mgr.GE(x_y, i_20))
    loc2 = Location(env, mgr.TRUE())
    loc2.set_progress(0, mgr.And(mgr.GE(x_y, m_100), mgr.LE(x_y, i_0)))
    h_y = Hint("h_y4", env, frozenset([y]), symbs)
    h_y.set_locs([loc0, loc1, loc2])
    res.append(h_y)

    # h_x3: x' = x*y then x' = y, both under x,y >= 1.
    loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, y)))
    loc1 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc1.set_progress(0, mgr.Equals(x_x, y))
    h_x = Hint("h_x3", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)

    # h_y0: self-loop, y' = y + 1 (matches the program's y update).
    stutter = mgr.Equals(x_y, y)
    loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(i_1, y)))
    h_y = Hint("h_y0", env, frozenset([y]), symbs)
    h_y.set_locs([loc])
    res.append(h_y)

    # h_pc0: pc == 1 -> pc' > pc + 1; pc > 2 -> pc' = 1.
    loc0 = Location(env, mgr.Equals(pc, i_1))
    loc0.set_progress(1, mgr.GT(x_pc, mgr.Plus(pc, i_1)))
    loc1 = Location(env, mgr.GT(pc, i_2))
    loc1.set_progress(0, mgr.Equals(x_pc, i_1))
    h_pc = Hint("h_pc0", env, frozenset([pc]), symbs)
    h_pc.set_locs([loc0, loc1])
    res.append(h_pc)

    # h_x6: x' = x*x + 1 alternating with x' < -x*x.
    loc0 = Location(env, mgr.LE(x, i_20))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(mgr.Times(x, x), i_1)))
    loc1 = Location(env, mgr.GE(x, i_20))
    loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
    h_x = Hint("h_x6", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)

    # h_y3: y' = x*y then y' = -100.
    loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
    loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(x, y)))
    loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
    loc1.set_progress(0, mgr.Equals(x_y, m_100))
    h_y = Hint("h_y3", env, frozenset([y]), symbs)
    h_y.set_locs([loc0, loc1])
    res.append(h_y)

    # h_pc3: like h_pc2 but with <= guards on pc.
    loc0 = Location(env, mgr.LE(pc, i_1))
    loc0.set_progress(1, mgr.GT(x_pc, pc))
    loc1 = Location(env, mgr.LE(pc, i_2))
    loc1.set_progress(0, mgr.Equals(x_pc, mgr.Div(pc, pc)))
    h_pc = Hint("h_pc3", env, frozenset([pc]), symbs)
    h_pc.set_locs([loc0, loc1])
    res.append(h_pc)

    # h_y5: single self-loop, y' = y*y.
    loc0 = Location(env, mgr.GE(y, m_100))
    loc0.set_progress(0, mgr.Equals(x_y, mgr.Times(y, y)))
    h_y = Hint("h_y5", env, frozenset([y]), symbs)
    h_y.set_locs([loc0])
    res.append(h_y)

    # h_x2: x' = x + y then x' = y.
    loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(x, y)))
    loc1 = Location(env, mgr.GE(x, i_2), mgr.GE(y, i_1))
    loc1.set_progress(0, mgr.Equals(x_x, y))
    h_x = Hint("h_x2", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1])
    res.append(h_x)

    # h_y2: y' = x + y then y' = -100.
    loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
    loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(x, y)))
    loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
    loc1.set_progress(0, mgr.Equals(x_y, m_100))
    h_y = Hint("h_y2", env, frozenset([y]), symbs)
    h_y.set_locs([loc0, loc1])
    res.append(h_y)

    # h_y6: y' = y*y then y' >= y + 1 (strictly increasing y).
    loc0 = Location(env, mgr.GE(y, m_100))
    loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(y, y)))
    loc1 = Location(env, mgr.GE(y, i_0))
    loc1.set_progress(0, mgr.GE(x_y, mgr.Plus(y, i_1)))
    h_y = Hint("h_y6", env, frozenset([y]), symbs)
    h_y.set_locs([loc0, loc1])
    res.append(h_y)

    # h_x1: self-loop, x' = x + y (matches the program's x update).
    stutter = mgr.Equals(x_x, x)
    loc = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
    h_x = Hint("h_x1", env, frozenset([x]), symbs)
    h_x.set_locs([loc])
    res.append(h_x)

    # h_x7: three locations mixing x*y + 1, x' < -x*x and x' >= x/x.
    loc0 = Location(env, mgr.GE(x, i_1), mgr.GT(y, i_1))
    loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(mgr.Times(x, y), i_1)))
    loc1 = Location(env, mgr.GE(x, i_2))
    loc1.set_progress(2, mgr.LT(x_x, mgr.Times(m_1, x, x)))
    loc2 = Location(env, mgr.LE(x, i_4))
    loc2.set_progress(0, mgr.GE(x_x, mgr.Div(x, x)))
    h_x = Hint("h_x7", env, frozenset([x]), symbs)
    h_x.set_locs([loc0, loc1, loc2])
    res.append(h_x)

    # h_x0: self-loop like h_x1 but requires x >= 20.
    stutter = mgr.Equals(x_x, x)
    loc = Location(env, mgr.GE(x, i_20), mgr.GE(y, i_1), stutterT=stutter)
    loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
    h_x = Hint("h_x0", env, frozenset([x]), symbs)
    h_x.set_locs([loc])
    res.append(h_x)

    return frozenset(res)
| 31.75 | 77 | 0.590786 | from typing import Tuple, FrozenSet
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
import pysmt.typing as types
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
symbols = frozenset([pc, x, y])
m_1 = mgr.Int(-1)
n_locs = 3
max_int = n_locs
ints = []
pcs = []
x_pcs = []
for idx in range(n_locs):
num = mgr.Int(idx)
ints.append(num)
pcs.append(mgr.Equals(pc, num))
x_pcs.append(mgr.Equals(x_pc, num))
for idx in range(n_locs, max_int):
num = mgr.Int(idx)
ints.append(num)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
init = pcs[0]
cfg = []
cond = mgr.GE(x, ints[0])
cfg.append(mgr.Implies(mgr.And(pcs[0], cond), x_pcs[1]))
# pc = 0 & !(x >= 0) -> pc' = -1
cfg.append(mgr.Implies(mgr.And(pcs[0], mgr.Not(cond)), x_pcend))
cfg.append(mgr.Implies(pcs[1], x_pcs[2]))
# pc = 2 -> pc' = 0
cfg.append(mgr.Implies(pcs[2], x_pcs[0]))
cfg.append(mgr.Implies(pcend, x_pcend))
trans = []
same_x = mgr.Equals(x_x, x)
same_y = mgr.Equals(x_y, y)
same = mgr.And(same_x, same_y)
# pc = 0 -> same
trans.append(mgr.Implies(pcs[0], same))
# pc = 1 -> x' = x + y & same_y
trans.append(mgr.Implies(pcs[1],
mgr.And(mgr.Equals(x_x, mgr.Plus(x, y)),
same_y)))
trans.append(mgr.Implies(pcs[2],
mgr.And(same_x,
mgr.Equals(x_y, mgr.Plus(y, ints[1])))))
# pc = end -> same
trans.append(mgr.Implies(pcend, same))
trans = mgr.And(*cfg, *trans)
fairness = mgr.Not(mgr.Equals(pc, m_1))
return symbols, init, trans, fairness
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
symbs = frozenset([pc, x, y])
m_100 = mgr.Int(-100)
m_1 = mgr.Int(-1)
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_4 = mgr.Int(4)
i_20 = mgr.Int(20)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
res = []
loc0 = Location(env, mgr.Equals(pc, i_1))
loc0.set_progress(1, mgr.GT(x_pc, pc))
loc1 = Location(env, mgr.GE(pc, i_2))
loc1.set_progress(0, mgr.Equals(x_pc, mgr.Div(pc, pc)))
h_pc = Hint("h_pc2", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1])
res.append(h_pc)
stutter = mgr.Equals(x_y, y)
loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(x, y)))
h_y = Hint("h_y1", env, frozenset([y]), symbs)
h_y.set_locs([loc])
res.append(h_y)
loc0 = Location(env, mgr.LE(x, i_0))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, x)))
loc1 = Location(env, mgr.GE(x, i_0))
loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
h_x = Hint("h_x5", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, y)))
loc1 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc1.set_progress(2, mgr.GT(x_x, y))
loc2 = Location(env, mgr.GE(x, i_2))
loc2.set_progress(0, mgr.GE(x_x, i_20))
h_x = Hint("h_x4", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(x, y)))
loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
loc1.set_progress(2, mgr.GE(x_y, i_20))
loc2 = Location(env, mgr.TRUE())
loc2.set_progress(0, mgr.And(mgr.GE(x_y, m_100), mgr.LE(x_y, i_0)))
h_y = Hint("h_y4", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1, loc2])
res.append(h_y)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Times(x, y)))
loc1 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, y))
h_x = Hint("h_x3", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
stutter = mgr.Equals(x_y, y)
loc = Location(env, mgr.TRUE(), mgr.LE(x, i_20), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_y, mgr.Plus(i_1, y)))
h_y = Hint("h_y0", env, frozenset([y]), symbs)
h_y.set_locs([loc])
res.append(h_y)
loc0 = Location(env, mgr.Equals(pc, i_1))
loc0.set_progress(1, mgr.GT(x_pc, mgr.Plus(pc, i_1)))
loc1 = Location(env, mgr.GT(pc, i_2))
loc1.set_progress(0, mgr.Equals(x_pc, i_1))
h_pc = Hint("h_pc0", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1])
res.append(h_pc)
loc0 = Location(env, mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(mgr.Times(x, x), i_1)))
loc1 = Location(env, mgr.GE(x, i_20))
loc1.set_progress(0, mgr.LT(x_x, mgr.Times(m_1, x, x)))
h_x = Hint("h_x6", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(x, y)))
loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
loc1.set_progress(0, mgr.Equals(x_y, m_100))
h_y = Hint("h_y3", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1])
res.append(h_y)
loc0 = Location(env, mgr.LE(pc, i_1))
loc0.set_progress(1, mgr.GT(x_pc, pc))
loc1 = Location(env, mgr.LE(pc, i_2))
loc1.set_progress(0, mgr.Equals(x_pc, mgr.Div(pc, pc)))
h_pc = Hint("h_pc3", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1])
res.append(h_pc)
loc0 = Location(env, mgr.GE(y, m_100))
loc0.set_progress(0, mgr.Equals(x_y, mgr.Times(y, y)))
h_y = Hint("h_y5", env, frozenset([y]), symbs)
h_y.set_locs([loc0])
res.append(h_y)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(x, y)))
loc1 = Location(env, mgr.GE(x, i_2), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, y))
h_x = Hint("h_x2", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.GE(y, m_100), mgr.LE(x, i_20))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(x, y)))
loc1 = Location(env, mgr.TRUE(), mgr.GE(x, m_100))
loc1.set_progress(0, mgr.Equals(x_y, m_100))
h_y = Hint("h_y2", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1])
res.append(h_y)
loc0 = Location(env, mgr.GE(y, m_100))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Times(y, y)))
loc1 = Location(env, mgr.GE(y, i_0))
loc1.set_progress(0, mgr.GE(x_y, mgr.Plus(y, i_1)))
h_y = Hint("h_y6", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1])
res.append(h_y)
stutter = mgr.Equals(x_x, x)
loc = Location(env, mgr.GE(x, i_1), mgr.GE(y, i_1), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x1", env, frozenset([x]), symbs)
h_x.set_locs([loc])
res.append(h_x)
loc0 = Location(env, mgr.GE(x, i_1), mgr.GT(y, i_1))
loc0.set_progress(1, mgr.Equals(x_x, mgr.Plus(mgr.Times(x, y), i_1)))
loc1 = Location(env, mgr.GE(x, i_2))
loc1.set_progress(2, mgr.LT(x_x, mgr.Times(m_1, x, x)))
loc2 = Location(env, mgr.LE(x, i_4))
loc2.set_progress(0, mgr.GE(x_x, mgr.Div(x, x)))
h_x = Hint("h_x7", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
stutter = mgr.Equals(x_x, x)
loc = Location(env, mgr.GE(x, i_20), mgr.GE(y, i_1), stutterT=stutter)
loc.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x0", env, frozenset([x]), symbs)
h_x.set_locs([loc])
res.append(h_x)
return frozenset(res)
| true | true |
1c38271b2f5390e82710ce248c2a449c33d2bc3e | 1,659 | py | Python | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdIpv4MapCache/cli/equal/golden_output6_metric_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdIpv4MapCache/cli/equal/golden_output6_metric_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | src/genie/libs/parser/iosxe/tests/ShowLispInstanceIdIpv4MapCache/cli/equal/golden_output6_metric_expected.py | nielsvanhooy/genieparser | 9a1955749697a6777ca614f0af4d5f3a2c254ccd | [
"Apache-2.0"
] | null | null | null | expected_output = {
'lisp_id': {
0: {
'instance_id': {
111: {
'eid_prefix': {
'0.0.0.0/0': {
'action': 'send-map-request + Encapsulating to proxy ETR',
'expiry_time': '00:14:59',
'locators': {
'21.21.21.21': {
'encap_iid': '-',
'metric': None,
'priority': 255,
'rloc_state': 'admin-down',
'uptime': '00:00:23',
'weight': 50,
},
'24.24.24.24': {
'encap_iid': '-',
'metric': 0,
'priority': 50,
'rloc_state': 'up',
'uptime': '00:00:23',
'weight': 50,
},
},
'map_reply_state': 'unknown-eid-forward',
'negative_cache_entry': False,
'uptime': '00:00:01',
'via': 'map-reply',
},
},
'eid_table': 'vrf prov ',
'entries': 1,
},
},
},
},
} | 41.475 | 87 | 0.221218 | expected_output = {
'lisp_id': {
0: {
'instance_id': {
111: {
'eid_prefix': {
'0.0.0.0/0': {
'action': 'send-map-request + Encapsulating to proxy ETR',
'expiry_time': '00:14:59',
'locators': {
'21.21.21.21': {
'encap_iid': '-',
'metric': None,
'priority': 255,
'rloc_state': 'admin-down',
'uptime': '00:00:23',
'weight': 50,
},
'24.24.24.24': {
'encap_iid': '-',
'metric': 0,
'priority': 50,
'rloc_state': 'up',
'uptime': '00:00:23',
'weight': 50,
},
},
'map_reply_state': 'unknown-eid-forward',
'negative_cache_entry': False,
'uptime': '00:00:01',
'via': 'map-reply',
},
},
'eid_table': 'vrf prov ',
'entries': 1,
},
},
},
},
} | true | true |
1c382759860b98ee6972f4f10fd270f3d5268410 | 665 | py | Python | nnabla_nas/optimizer/__init__.py | sony/nnabla-nas | 269deb8229fda0f0901c47d21ac5ce244f403f63 | [
"Apache-2.0"
] | 16 | 2020-07-10T08:31:18.000Z | 2022-03-24T13:28:15.000Z | nnabla_nas/optimizer/__init__.py | sony/nnabla-nas | 269deb8229fda0f0901c47d21ac5ce244f403f63 | [
"Apache-2.0"
] | 1 | 2020-10-21T12:46:30.000Z | 2021-02-03T00:18:29.000Z | nnabla_nas/optimizer/__init__.py | sony/nnabla-nas | 269deb8229fda0f0901c47d21ac5ce244f403f63 | [
"Apache-2.0"
] | 3 | 2020-07-15T11:42:11.000Z | 2022-03-25T16:54:49.000Z | # Copyright (c) 2020 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .optimizer import Optimizer
__all__ = ['Optimizer']
| 36.944444 | 74 | 0.760902 |
from .optimizer import Optimizer
__all__ = ['Optimizer']
| true | true |
1c382788882dfc7e712f0027de33754dbb491241 | 3,234 | py | Python | src/DanceCV.py | stbnps/DanceCV | ac94f5f715c246a50f67f99abbe926a7461fba11 | [
"BSD-3-Clause"
] | 33 | 2015-01-02T08:14:39.000Z | 2021-12-08T16:13:12.000Z | src/DanceCV.py | stbnps/DanceCV | ac94f5f715c246a50f67f99abbe926a7461fba11 | [
"BSD-3-Clause"
] | 2 | 2018-03-03T09:18:58.000Z | 2020-06-29T08:25:20.000Z | src/DanceCV.py | stbnps/DanceCV | ac94f5f715c246a50f67f99abbe926a7461fba11 | [
"BSD-3-Clause"
] | 17 | 2015-11-22T03:42:59.000Z | 2021-11-18T13:05:05.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright (c) 2014, Esteban Pardo Sánchez
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
from Resource import Resource
from Audio import Audio
from Scene import Scene
from Song import Song, Note, loadSong
from Input import Input
import pygame
from pygame.locals import *
import sys
import getopt
import Constants
#import cProfile as profile
class DanceCV():
    """Main game object: wires together input, resources, audio, the
    selected song and the scene, then drives the 30 FPS main loop."""

    def __init__(self, song, speed):
        # Audio must be pre-opened before pygame.init() and opened after
        # it — presumably a mixer initialisation-order requirement of
        # pygame; TODO confirm against the Audio class.
        self.input = Input()
        self.resource = Resource()
        self.audio = Audio()
        self.audio.pre_open()
        pygame.init()
        self.audio.open()
        # Fall back to the bundled "gangnam" song when -s was not given.
        if song != None:
            self.song = loadSong(self.resource, song)
        else:
            self.song = loadSong(self.resource, "gangnam")
        self.clock = pygame.time.Clock()
        pygame.display.set_mode((Constants.SCREEN_WIDTH, Constants.SCREEN_HEIGHT))
        pygame.display.set_caption("DanceCV")
        screen = pygame.display.get_surface()
        # Scene speed defaults to 2 when -x was not supplied.
        if speed != None:
            self.scene = Scene(self.resource, self.song, screen, self.input, speed)
        else:
            self.scene = Scene(self.resource, self.song, screen, self.input, 2)

    def run(self):
        """Run the main loop forever; exits the process on window close."""
        while True:
            for events in pygame.event.get():
                if events.type == QUIT:
                    sys.exit(0)
            self.input.run()
            self.scene.run()
            pygame.display.update()
            # Cap the frame rate at 30 FPS.
            self.clock.tick(30)
if __name__ == "__main__":
    # Command line: -s <song name>  -x <speed multiplier (float)>
    song = None
    speed = None
    options, remainder = getopt.getopt(sys.argv[1:], 's:x:')
    for opt, arg in options:
        # Bug fix: the original used `opt in ('-s')`, which is substring
        # search inside the *string* '-s' (the parentheses do not make a
        # tuple); explicit equality states the intent correctly.
        if opt == '-s':
            song = arg
        elif opt == '-x':
            speed = float(arg)
    game = DanceCV(song, speed)
    game.run()
| 34.404255 | 83 | 0.683055 |
from Resource import Resource
from Audio import Audio
from Scene import Scene
from Song import Song, Note, loadSong
from Input import Input
import pygame
from pygame.locals import *
import sys
import getopt
import Constants
class DanceCV():
def __init__(self, song, speed):
self.input = Input()
self.resource = Resource()
self.audio = Audio()
self.audio.pre_open()
pygame.init()
self.audio.open()
if song != None:
self.song = loadSong(self.resource, song)
else:
self.song = loadSong(self.resource, "gangnam")
self.clock = pygame.time.Clock()
pygame.display.set_mode((Constants.SCREEN_WIDTH, Constants.SCREEN_HEIGHT))
pygame.display.set_caption("DanceCV")
screen = pygame.display.get_surface()
if speed != None:
self.scene = Scene(self.resource, self.song, screen, self.input, speed)
else:
self.scene = Scene(self.resource, self.song, screen, self.input, 2)
def run(self):
while True:
for events in pygame.event.get():
if events.type == QUIT:
sys.exit(0)
self.input.run()
self.scene.run()
pygame.display.update()
self.clock.tick(30)
if __name__ == "__main__":
song = None
speed = None
options, remainder = getopt.getopt(sys.argv[1:], 's:x:')
for opt, arg in options:
if opt in ('-s'):
song = arg
elif opt in ('-x'):
speed = float(arg)
game = DanceCV(song, speed)
game.run()
| true | true |
1c3828893b4eae531bd9c2811241ec3653b28a34 | 4,349 | tac | Python | mailmynet/Maildir/proxy_postfix/Twisted-11.0.0/doc/core/howto/tutorial/listings/finger/finger18.tac | SPIN-UMass/SWEET | 1b0f39222e7064f70812e3293ca023619295741d | [
"MIT"
] | 3 | 2020-04-02T06:23:44.000Z | 2020-08-13T20:32:31.000Z | mailmynet/Maildir/proxy_postfix/Twisted-11.0.0/doc/core/howto/tutorial/listings/finger/finger18.tac | SPIN-UMass/SWEET | 1b0f39222e7064f70812e3293ca023619295741d | [
"MIT"
] | null | null | null | mailmynet/Maildir/proxy_postfix/Twisted-11.0.0/doc/core/howto/tutorial/listings/finger/finger18.tac | SPIN-UMass/SWEET | 1b0f39222e7064f70812e3293ca023619295741d | [
"MIT"
] | 1 | 2020-04-02T06:26:10.000Z | 2020-04-02T06:26:10.000Z | # Do everything properly
from twisted.application import internet, service
from twisted.internet import protocol, reactor, defer
from twisted.words.protocols import irc
from twisted.protocols import basic
from twisted.web import resource, server, static, xmlrpc
import cgi
def catchError(err):
    """Errback: map any failure to a generic, client-safe message.

    The underlying error is deliberately not exposed to the remote
    client; the Deferred chain continues with this replacement string.
    """
    return "Internal error in server"
class FingerProtocol(basic.LineReceiver):
    """Finger protocol: each received line is a username to look up."""

    def lineReceived(self, user):
        # The factory supplies getUser(); it returns a Deferred that
        # fires with the user's status string.
        d = self.factory.getUser(user)
        d.addErrback(catchError)

        def writeValue(value):
            # Send the status line and close: finger is one-shot.
            self.transport.write(value+'\r\n')
            self.transport.loseConnection()

        d.addCallback(writeValue)
class IRCReplyBot(irc.IRCClient):
    """IRC bot that answers private messages with the finger status of
    the user named in the message body."""

    def connectionMade(self):
        # Nickname comes from the factory so one class serves any config.
        self.nickname = self.factory.nickname
        irc.IRCClient.connectionMade(self)

    def privmsg(self, user, channel, msg):
        # user is "nick!ident@host"; keep just the nick for the reply.
        user = user.split('!')[0]
        # Only react to messages addressed directly to the bot.
        if self.nickname.lower() == channel.lower():
            d = self.factory.getUser(msg)
            d.addErrback(catchError)
            d.addCallback(lambda m: "Status of %s: %s" % (msg, m))
            d.addCallback(lambda m: self.msg(user, m))
class UserStatusTree(resource.Resource):
    """Web root resource: lists all known users as links; child paths
    resolve to a per-user status page."""

    def __init__(self, service):
        resource.Resource.__init__(self)
        self.service = service

    def render_GET(self, request):
        # getUsers() returns a Deferred, so render asynchronously and
        # tell twisted.web to wait with NOT_DONE_YET.
        d = self.service.getUsers()

        def formatUsers(users):
            l = ['<li><a href="%s">%s</a></li>' % (user, user)
                 for user in users]
            return '<ul>'+''.join(l)+'</ul>'

        d.addCallback(formatUsers)
        d.addCallback(request.write)
        d.addCallback(lambda _: request.finish())
        return server.NOT_DONE_YET

    def getChild(self, path, request):
        # "" (a trailing slash) maps back to the listing itself.
        if path=="":
            return UserStatusTree(self.service)
        else:
            return UserStatus(path, self.service)
class UserStatus(resource.Resource):
    """Per-user page: shows one user's finger status as HTML."""

    def __init__(self, user, service):
        resource.Resource.__init__(self)
        self.user = user
        self.service = service

    def render_GET(self, request):
        d = self.service.getUser(self.user)
        # Escape the status before embedding it in HTML.
        d.addCallback(cgi.escape)
        d.addCallback(lambda m:
                      '<h1>%s</h1>'%self.user+'<p>%s</p>'%m)
        d.addCallback(request.write)
        d.addCallback(lambda _: request.finish())
        return server.NOT_DONE_YET
class UserStatusXR(xmlrpc.XMLRPC):
    """XML-RPC facade: exposes getUser(user) -> status string."""

    def __init__(self, service):
        xmlrpc.XMLRPC.__init__(self)
        self.service = service

    def xmlrpc_getUser(self, user):
        # Returning the Deferred lets twisted send the result when ready.
        return self.service.getUser(user)
class FingerService(service.Service):
    """Serves user -> status mappings read periodically from a
    colon-delimited file ("user: status" per line)."""

    def __init__(self, filename):
        self.filename = filename
        self.users = {}

    def _read(self):
        """Reload the whole user map and reschedule itself in 30s."""
        self.users.clear()
        # Fix: the original used the py2-only `file()` builtin and never
        # closed the handle; `with open(...)` works on py2/py3 and
        # guarantees the file is closed even if a line is malformed.
        with open(self.filename) as f:
            for line in f:
                user, status = line.split(':', 1)
                user = user.strip()
                status = status.strip()
                self.users[user] = status
        # Keep the IDelayedCall so stopService can cancel it.
        self.call = reactor.callLater(30, self._read)

    def getUser(self, user):
        # Already-fired Deferred: lookups are served from memory.
        return defer.succeed(self.users.get(user, "No such user"))

    def getUsers(self):
        return defer.succeed(self.users.keys())

    def getFingerFactory(self):
        """Factory for the plain finger (port 79) protocol."""
        f = protocol.ServerFactory()
        f.protocol = FingerProtocol
        f.getUser = self.getUser
        return f

    def getResource(self):
        """Web root with the XML-RPC endpoint mounted at /RPC2."""
        r = UserStatusTree(self)
        x = UserStatusXR(self)
        r.putChild('RPC2', x)
        return r

    def getIRCBot(self, nickname):
        """Reconnecting IRC client factory answering finger queries."""
        f = protocol.ReconnectingClientFactory()
        f.protocol = IRCReplyBot
        f.nickname = nickname
        f.getUser = self.getUser
        return f

    def startService(self):
        self._read()
        service.Service.startService(self)

    def stopService(self):
        service.Service.stopService(self)
        # Cancel the pending re-read scheduled by _read().
        self.call.cancel()
# Twisted application glue: one FingerService exposed over three transports.
application = service.Application('finger', uid=1, gid=1)
f = FingerService('/etc/users')
serviceCollection = service.IServiceCollection(application)
# Classic finger protocol on port 79.
internet.TCPServer(79, f.getFingerFactory()
                   ).setServiceParent(serviceCollection)
# HTTP listing + XML-RPC on port 8000.
internet.TCPServer(8000, server.Site(f.getResource())
                   ).setServiceParent(serviceCollection)
# IRC bot connecting out to freenode.
internet.TCPClient('irc.freenode.org', 6667, f.getIRCBot('fingerbot')
                   ).setServiceParent(serviceCollection)
| 29.585034 | 69 | 0.627271 |
from twisted.application import internet, service
from twisted.internet import protocol, reactor, defer
from twisted.words.protocols import irc
from twisted.protocols import basic
from twisted.web import resource, server, static, xmlrpc
import cgi
def catchError(err):
return "Internal error in server"
class FingerProtocol(basic.LineReceiver):
def lineReceived(self, user):
d = self.factory.getUser(user)
d.addErrback(catchError)
def writeValue(value):
self.transport.write(value+'\r\n')
self.transport.loseConnection()
d.addCallback(writeValue)
class IRCReplyBot(irc.IRCClient):
def connectionMade(self):
self.nickname = self.factory.nickname
irc.IRCClient.connectionMade(self)
def privmsg(self, user, channel, msg):
user = user.split('!')[0]
if self.nickname.lower() == channel.lower():
d = self.factory.getUser(msg)
d.addErrback(catchError)
d.addCallback(lambda m: "Status of %s: %s" % (msg, m))
d.addCallback(lambda m: self.msg(user, m))
class UserStatusTree(resource.Resource):
def __init__(self, service):
resource.Resource.__init__(self)
self.service = service
def render_GET(self, request):
d = self.service.getUsers()
def formatUsers(users):
l = ['<li><a href="%s">%s</a></li>' % (user, user)
for user in users]
return '<ul>'+''.join(l)+'</ul>'
d.addCallback(formatUsers)
d.addCallback(request.write)
d.addCallback(lambda _: request.finish())
return server.NOT_DONE_YET
def getChild(self, path, request):
if path=="":
return UserStatusTree(self.service)
else:
return UserStatus(path, self.service)
class UserStatus(resource.Resource):
def __init__(self, user, service):
resource.Resource.__init__(self)
self.user = user
self.service = service
def render_GET(self, request):
d = self.service.getUser(self.user)
d.addCallback(cgi.escape)
d.addCallback(lambda m:
'<h1>%s</h1>'%self.user+'<p>%s</p>'%m)
d.addCallback(request.write)
d.addCallback(lambda _: request.finish())
return server.NOT_DONE_YET
class UserStatusXR(xmlrpc.XMLRPC):
def __init__(self, service):
xmlrpc.XMLRPC.__init__(self)
self.service = service
def xmlrpc_getUser(self, user):
return self.service.getUser(user)
class FingerService(service.Service):
def __init__(self, filename):
self.filename = filename
self.users = {}
def _read(self):
self.users.clear()
for line in file(self.filename):
user, status = line.split(':', 1)
user = user.strip()
status = status.strip()
self.users[user] = status
self.call = reactor.callLater(30, self._read)
def getUser(self, user):
return defer.succeed(self.users.get(user, "No such user"))
def getUsers(self):
return defer.succeed(self.users.keys())
def getFingerFactory(self):
f = protocol.ServerFactory()
f.protocol = FingerProtocol
f.getUser = self.getUser
return f
def getResource(self):
r = UserStatusTree(self)
x = UserStatusXR(self)
r.putChild('RPC2', x)
return r
def getIRCBot(self, nickname):
f = protocol.ReconnectingClientFactory()
f.protocol = IRCReplyBot
f.nickname = nickname
f.getUser = self.getUser
return f
def startService(self):
self._read()
service.Service.startService(self)
def stopService(self):
service.Service.stopService(self)
self.call.cancel()
application = service.Application('finger', uid=1, gid=1)
f = FingerService('/etc/users')
serviceCollection = service.IServiceCollection(application)
internet.TCPServer(79, f.getFingerFactory()
).setServiceParent(serviceCollection)
internet.TCPServer(8000, server.Site(f.getResource())
).setServiceParent(serviceCollection)
internet.TCPClient('irc.freenode.org', 6667, f.getIRCBot('fingerbot')
).setServiceParent(serviceCollection)
| true | true |
1c3828ea6b5762a786575e6a8677fcca9f43fee1 | 3,220 | py | Python | app/db/__init__.py | MoeZilla/BlueMoonVampireBot | af074f846b56e7a4f262740d7b65b32ab11a4e42 | [
"MIT"
] | 1 | 2022-01-25T06:28:51.000Z | 2022-01-25T06:28:51.000Z | app/db/__init__.py | MoeZilla/BlueMoonVampireBot | af074f846b56e7a4f262740d7b65b32ab11a4e42 | [
"MIT"
] | null | null | null | app/db/__init__.py | MoeZilla/BlueMoonVampireBot | af074f846b56e7a4f262740d7b65b32ab11a4e42 | [
"MIT"
] | null | null | null | """
MIT License
Copyright (C) 2021-2022, NkSama
Copyright (C) 2021-2022 Moezilla
Copyright (c) 2021, Sylviorus, <https://github.com/Sylviorus/BlueMoonVampireBot>
This file is part of @BlueMoonVampireBot (Antispam Telegram Bot)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import json
from operator import index
from re import S
from pymongo import MongoClient
import os
class DATABASE:
    """Thin wrapper around the Sylviorus/CUSTOM_ROLES Mongo collection.

    Documents have the shape {"user_id": <id>, "role": <str>}.
    """

    def __init__(self, db_url) -> None:
        self.db_url = db_url
        self.role_db = MongoClient(self.db_url)['Sylviorus']['CUSTOM_ROLES']

    def already_exists(self, user_id):
        """Return True iff a role document exists for *user_id*.

        Bug fix: the original queried ``self.db``, an attribute that is
        never assigned anywhere in the class, so every call raised
        AttributeError; the collection lives in ``self.role_db``.
        """
        doc = self.role_db.find_one({"user_id": user_id})
        return doc is not None

    def add_role(self, user_id, role):
        """Insert or update the custom role for *user_id*.

        Uses a single upserting update_one instead of the original
        find_one-then-update/insert pair (one round-trip, no race).
        """
        self.role_db.update_one(
            {"user_id": user_id},
            {"$set": {"role": role}},
            upsert=True,
        )

    def get_role(self, user_id):
        """Return {"user_id", "role", "status": True} for *user_id*,
        or {"status": False} when no role is stored.

        Single find_one instead of the original double lookup.
        """
        doc = self.role_db.find_one({"user_id": user_id})
        if doc:
            return {
                "user_id": doc['user_id'],
                "role": doc['role'],
                "status": True,
            }
        return {"status": False}
class LocalDb:
    """Tiny JSON-file key/value store: one ``<name>.json`` file per db."""

    def __init__(self, db_name) -> None:
        self.db_name = f"{db_name}.json"

    def create_db(self):
        """Create (or overwrite) the backing file with a valid JSON object.

        Fix: the original hand-wrote an escaped string literal; json.dump
        cannot produce malformed JSON, and ``with`` closes the handle
        even on error.
        """
        with open(self.db_name, "w+") as f:
            json.dump({"hello": "world"}, f)

    def add_reason(self, key, value):
        """Store *key* -> *value*; returns an error string if the db file
        does not exist yet (original contract)."""
        if not os.path.exists(self.db_name):
            return "Create db first"
        with open(self.db_name) as f:
            db = json.load(f)
        db[key] = value
        with open(self.db_name, "w") as f:
            json.dump(db, f)

    def get_reason(self, key):
        """Return the value stored under *key*, or "No Data" if the db
        file does not exist.

        Raises KeyError when the file exists but *key* is absent
        (preserved original behavior).
        """
        if not os.path.exists(self.db_name):
            return "No Data"
        with open(self.db_name) as f:
            return json.load(f)[key]
| 33.195876 | 80 | 0.614907 |
import json
from operator import index
from re import S
from pymongo import MongoClient
import os
class DATABASE:
def __init__(self, db_url) -> None:
self.db_url = db_url
self.role_db = MongoClient(self.db_url)['Sylviorus']['CUSTOM_ROLES']
def already_exists(self, user_id):
x = self.db.find_one({"user_id": user_id})
if x:
return True
else:
return False
def add_role(self, user_id, role):
if self.role_db.find_one({"user_id": user_id}):
self.role_db.update_one({"user_id": user_id},
{"$set": {
"role": role
}})
else:
self.role_db.insert_one({"user_id": user_id, "role": role})
def get_role(self, user_id):
if self.role_db.find_one({"user_id": user_id}):
role = self.role_db.find_one({"user_id": user_id})
final = {
"user_id": role['user_id'],
"role": role['role'],
"status": True,
}
return final
else:
return {"status": False}
class LocalDb:
def __init__(self, db_name) -> None:
self.db_name = f"{db_name}.json"
def create_db(self):
x = open(self.db_name, "w+")
x.write("""{\"hello" : "world\"}""")
x.close()
def add_reason(self, key, value):
if os.path.exists(self.db_name):
with open(self.db_name) as f:
db = json.load(f)
db.update({key: value})
with open(self.db_name, "w") as f:
json.dump(db, f)
else:
return "Create db first"
def get_reason(self, key):
if os.path.exists(self.db_name):
with open(self.db_name, "r") as f:
db = json.load(f)
return db[key]
else:
return "No Data"
| true | true |
1c38290c557a6708c9109fea74a5db7debcf6324 | 3,521 | py | Python | school/models.py | ntwaliandy/school-system | 93b81f62a21ae477ac524a99d2fbb6f16ef2efae | [
"MIT"
] | null | null | null | school/models.py | ntwaliandy/school-system | 93b81f62a21ae477ac524a99d2fbb6f16ef2efae | [
"MIT"
] | null | null | null | school/models.py | ntwaliandy/school-system | 93b81f62a21ae477ac524a99d2fbb6f16ef2efae | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class TeacherExtra(models.Model):
    """Per-teacher profile data attached 1:1 to a Django auth User."""
    # Linked auth account; deleting the User deletes this row too.
    user=models.OneToOneField(User,on_delete=models.CASCADE)
    salary = models.PositiveIntegerField(null=False)
    # Set once, automatically, when the row is first created.
    joindate=models.DateField(auto_now_add=True)
    mobile = models.CharField(max_length=40)
    # False until the account is approved/activated.
    status=models.BooleanField(default=False)
    def __str__(self):
        return self.user.first_name
    @property
    def get_id(self):
        # Convenience accessor for the underlying User primary key.
        return self.user.id
    @property
    def get_name(self):
        # Full display name pulled from the linked User record.
        return self.user.first_name+" "+self.user.last_name
# Choices for a student's class/level ("one" .. "ten"); shared by forms
# and the StudentExtra.cl field below.
classes=[('one','one'),('two','two'),('three','three'),
    ('four','four'),('five','five'),('six','six'),('seven','seven'),('eight','eight'),('nine','nine'),('ten','ten')]
class StudentExtra(models.Model):
    """Per-student profile data attached 1:1 to a Django auth User."""
    user=models.OneToOneField(User,on_delete=models.CASCADE)
    # Roll number within the class (stored as text).
    roll = models.CharField(max_length=10)
    mobile = models.CharField(max_length=40,null=True)
    fee=models.PositiveIntegerField(null=True)
    # Class/level, constrained to the `classes` choices above.
    cl= models.CharField(max_length=10,choices=classes,default='one')
    # False until the account is approved/activated.
    status=models.BooleanField(default=False)
    @property
    def get_name(self):
        return self.user.first_name+" "+self.user.last_name
    @property
    def get_id(self):
        return self.user.id
    def __str__(self):
        return self.user.first_name + ' ' + self.user.last_name + ' ==>> ' + self.cl + ' ' + 'level'
class Attendance(models.Model):
    """One attendance mark for one student (by roll) on one date."""
    roll=models.CharField(max_length=10,null=True)
    date=models.DateField()
    # Class/level the record belongs to.
    cl=models.CharField(max_length=10)
    # e.g. "Present"/"Absent" — stored as free text.
    present_status = models.CharField(max_length=10)
    def __str__(self):
        return str(self.date) + ' ' + self.present_status
class Notice(models.Model):
    """School-wide notice with an optional attachment."""
    # Refreshed on every save (auto_now), not just on creation.
    date=models.DateField(auto_now=True)
    by=models.CharField(max_length=20,null=True,default='school')
    message=models.CharField(max_length=500)
    file=models.FileField()
    def __str__(self):
        return self.message + ' by ' + self.by
class Notes(models.Model):
    """Course notes uploaded for a particular class/roll."""
    date=models.DateField(auto_now=True)
    by=models.CharField(max_length=30,null=True, default='Your Name')
    # Target class/level, constrained to the module-level `classes` choices.
    year=models.CharField(max_length=10, choices=classes, default='one')
    roll=models.CharField(max_length=10)
    message=models.CharField(max_length=500)
    file=models.FileField()
    def __str__(self):
        return self.message + ' by ' + self.by
class Grading(models.Model):
    """Per-course grade record for one student; ``total`` is derived from
    the three component scores every time the row is saved."""
    date=models.DateField(auto_now=True)
    by=models.CharField(max_length=30, null=True, default='Your Name')
    student_first_name=models.CharField(max_length=30, null=True)
    student_last_name=models.CharField(max_length=30, null=True)
    roll=models.CharField(max_length=50)
    course_unit=models.CharField(max_length=100)
    assignments=models.DecimalField(max_digits=3, decimal_places=1)
    Tests=models.DecimalField(max_digits=6, decimal_places=1)
    exam=models.DecimalField(max_digits=6, decimal_places=1)
    # Always recomputed in save(); the default only covers unsaved rows.
    total=models.DecimalField(max_digits=6, decimal_places=1, default='0.0')
    def save(self, *args, **kwargs):
        """Recompute ``total`` and delegate to Django's save().

        BUG FIX: the original signature was ``save(self)`` and called
        ``super().save()`` with no arguments, which breaks any caller
        passing standard Django save options (``using=``, ``force_insert=``,
        ``update_fields=``, ...).  Accept and forward them.
        """
        self.total = self.assignments + self.Tests + self.exam
        return super(Grading, self).save(*args, **kwargs)
    def __str__(self):
        return self.student_first_name + ' ==>> ' + self.course_unit
class Help(models.Model):
    """Free-text complaint/help request submitted by a student."""
    # Refreshed on every save (auto_now).
    date=models.DateField(auto_now=True)
    message=models.TextField(blank=True, max_length=1000)
    # Display name of the submitting student (plain text, not a FK).
    student=models.CharField(max_length=30, null=True, default='student name')
    def __str__(self):
        return self.student + ' ' + 'complaint/s'
| 36.298969 | 112 | 0.703777 | from django.db import models
from django.contrib.auth.models import User
class TeacherExtra(models.Model):
user=models.OneToOneField(User,on_delete=models.CASCADE)
salary = models.PositiveIntegerField(null=False)
joindate=models.DateField(auto_now_add=True)
mobile = models.CharField(max_length=40)
status=models.BooleanField(default=False)
def __str__(self):
return self.user.first_name
@property
def get_id(self):
return self.user.id
@property
def get_name(self):
return self.user.first_name+" "+self.user.last_name
classes=[('one','one'),('two','two'),('three','three'),
('four','four'),('five','five'),('six','six'),('seven','seven'),('eight','eight'),('nine','nine'),('ten','ten')]
class StudentExtra(models.Model):
user=models.OneToOneField(User,on_delete=models.CASCADE)
roll = models.CharField(max_length=10)
mobile = models.CharField(max_length=40,null=True)
fee=models.PositiveIntegerField(null=True)
cl= models.CharField(max_length=10,choices=classes,default='one')
status=models.BooleanField(default=False)
@property
def get_name(self):
return self.user.first_name+" "+self.user.last_name
@property
def get_id(self):
return self.user.id
def __str__(self):
return self.user.first_name + ' ' + self.user.last_name + ' ==>> ' + self.cl + ' ' + 'level'
class Attendance(models.Model):
roll=models.CharField(max_length=10,null=True)
date=models.DateField()
cl=models.CharField(max_length=10)
present_status = models.CharField(max_length=10)
def __str__(self):
return str(self.date) + ' ' + self.present_status
class Notice(models.Model):
date=models.DateField(auto_now=True)
by=models.CharField(max_length=20,null=True,default='school')
message=models.CharField(max_length=500)
file=models.FileField()
def __str__(self):
return self.message + ' by ' + self.by
class Notes(models.Model):
date=models.DateField(auto_now=True)
by=models.CharField(max_length=30,null=True, default='Your Name')
year=models.CharField(max_length=10, choices=classes, default='one')
roll=models.CharField(max_length=10)
message=models.CharField(max_length=500)
file=models.FileField()
def __str__(self):
return self.message + ' by ' + self.by
class Grading(models.Model):
date=models.DateField(auto_now=True)
by=models.CharField(max_length=30, null=True, default='Your Name')
student_first_name=models.CharField(max_length=30, null=True)
student_last_name=models.CharField(max_length=30, null=True)
roll=models.CharField(max_length=50)
course_unit=models.CharField(max_length=100)
assignments=models.DecimalField(max_digits=3, decimal_places=1)
Tests=models.DecimalField(max_digits=6, decimal_places=1)
exam=models.DecimalField(max_digits=6, decimal_places=1)
total=models.DecimalField(max_digits=6, decimal_places=1, default='0.0')
def save(self):
self.total = self.assignments + self.Tests + self.exam
return super(Grading, self).save()
def __str__(self):
return self.student_first_name + ' ==>> ' + self.course_unit
class Help(models.Model):
date=models.DateField(auto_now=True)
message=models.TextField(blank=True, max_length=1000)
student=models.CharField(max_length=30, null=True, default='student name')
def __str__(self):
return self.student + ' ' + 'complaint/s'
| true | true |
1c38298251177b9e862ba52d259770df7599f35e | 11,130 | py | Python | tests/func/test_scm.py | lucasalavapena/dvc | 230eb7087df7f063ded7422af7ae45bd04eb794a | [
"Apache-2.0"
] | null | null | null | tests/func/test_scm.py | lucasalavapena/dvc | 230eb7087df7f063ded7422af7ae45bd04eb794a | [
"Apache-2.0"
] | 87 | 2021-04-27T08:17:31.000Z | 2022-03-30T12:12:40.000Z | tests/func/test_scm.py | lucasalavapena/dvc | 230eb7087df7f063ded7422af7ae45bd04eb794a | [
"Apache-2.0"
] | null | null | null | import os
import sys
import pytest
from git import Repo
from dvc.scm import SCM, Git, NoSCM
from dvc.scm.base import SCMError
from dvc.system import System
from tests.basic_env import TestGitSubmodule
from tests.utils import get_gitignore_content
def test_init_none(tmp_dir):
    # no_scm=True must resolve to the no-op SCM implementation.
    assert isinstance(SCM(os.fspath(tmp_dir), no_scm=True), NoSCM)
def test_init_git(tmp_dir):
    # A directory containing a git repo resolves to the Git backend.
    Repo.init(os.fspath(tmp_dir))
    assert isinstance(SCM(os.fspath(tmp_dir)), Git)
def test_init_no_git(tmp_dir):
    # A plain directory with no repo must raise instead of guessing.
    with pytest.raises(SCMError):
        SCM(os.fspath(tmp_dir))
def test_init_sub_dir(tmp_dir):
    # SCM() called on a subdirectory walks up to the repository root.
    Repo.init(os.fspath(tmp_dir))
    subdir = tmp_dir / "dir"
    subdir.mkdir()
    scm = SCM(os.fspath(subdir))
    assert scm.root_dir == os.fspath(tmp_dir)
def test_commit(tmp_dir, scm):
    # add + commit makes the file appear in git's tracked-file list.
    tmp_dir.gen({"foo": "foo"})
    scm.add(["foo"])
    scm.commit("add")
    assert "foo" in scm.gitpython.git.ls_files()
def test_is_tracked(tmp_dir, scm):
    # is_tracked() must accept absolute and relative paths, including
    # non-ASCII filenames, both before and after the commit.
    tmp_dir.gen({"foo": "foo", "тест": "проверка"})
    scm.add(["foo", "тест"])
    abs_foo = os.path.abspath("foo")
    assert scm.is_tracked(abs_foo)
    assert scm.is_tracked("foo")
    assert scm.is_tracked("тест")
    scm.commit("add")
    assert scm.is_tracked(abs_foo)
    assert scm.is_tracked("foo")
    # Removing the path from the index makes it untracked again.
    scm.gitpython.repo.index.remove(["foo"], working_tree=True)
    assert not scm.is_tracked(abs_foo)
    assert not scm.is_tracked("foo")
    assert not scm.is_tracked("not-existing-file")
class TestSCMGitSubmodule(TestGitSubmodule):
    # Legacy unittest-style cases exercising SCM inside a git submodule.
    def test_git_submodule(self):
        # The submodule directory must still resolve to the Git backend.
        self.assertIsInstance(SCM(os.curdir), Git)
    def test_commit_in_submodule(self):
        # Committing via the wrapper must register the file with git.
        G = Git(self._root_dir)
        G.add(["foo"])
        G.commit("add")
        self.assertTrue("foo" in self.git.git.ls_files())
def _count_gitignore_entries(line):
    # Helper: number of times an exact entry appears in the .gitignore
    # produced by the fixture repository.
    lines = get_gitignore_content()
    return lines.count(line)
def test_ignore(tmp_dir, scm):
    # Ignoring the same path twice must not duplicate the entry, and
    # removing the last entry deletes the .gitignore file entirely.
    foo = os.fspath(tmp_dir / "foo")
    target = "/foo"
    scm.ignore(foo)
    assert (tmp_dir / ".gitignore").is_file()
    scm._reset()
    assert _count_gitignore_entries(target) == 1
    scm.ignore(foo)
    assert (tmp_dir / ".gitignore").is_file()
    scm._reset()
    assert _count_gitignore_entries(target) == 1
    scm.ignore_remove(foo)
    assert not (tmp_dir / ".gitignore").exists()
def test_ignored(tmp_dir, scm):
    # A glob pattern in .gitignore applies only to matching files.
    tmp_dir.gen({"dir1": {"file1.jpg": "cont", "file2.txt": "cont"}})
    tmp_dir.gen({".gitignore": "dir1/*.jpg"})
    assert scm.is_ignored(tmp_dir / "dir1" / "file1.jpg")
    assert not scm.is_ignored(tmp_dir / "dir1" / "file2.txt")
def test_ignored_dir_unignored_subdirs(tmp_dir, scm):
    # Negated (!) patterns re-include directories and *.csv files inside
    # an otherwise fully-ignored "data" tree; behaviour must be identical
    # for paths that exist and paths that don't.
    tmp_dir.gen({".gitignore": "data/**\n!data/**/\n!data/**/*.csv"})
    scm.add([".gitignore"])
    tmp_dir.gen(
        {
            os.path.join("data", "raw", "tracked.csv"): "cont",
            os.path.join("data", "raw", "not_tracked.json"): "cont",
        }
    )
    assert not scm.is_ignored(tmp_dir / "data" / "raw" / "tracked.csv")
    assert scm.is_ignored(tmp_dir / "data" / "raw" / "not_tracked.json")
    assert not scm.is_ignored(tmp_dir / "data" / "raw" / "non_existent.csv")
    assert scm.is_ignored(tmp_dir / "data" / "raw" / "non_existent.json")
    assert not scm.is_ignored(tmp_dir / "data" / "non_existent.csv")
    assert scm.is_ignored(tmp_dir / "data" / "non_existent.json")
    assert not scm.is_ignored(f"data{os.sep}")
    # git check-ignore would now mark "data/raw" as ignored
    # after detecting it's a directory in the file system;
    # instead, we rely on the trailing separator to determine if handling a
    # a directory - for consistency between existent and non-existent paths
    assert scm.is_ignored(os.path.join("data", "raw"))
    assert not scm.is_ignored(os.path.join("data", f"raw{os.sep}"))
    assert scm.is_ignored(os.path.join("data", "non_existent"))
    assert not scm.is_ignored(os.path.join("data", f"non_existent{os.sep}"))
def test_get_gitignore(tmp_dir, scm):
    # Entries are rooted ("/name") and written to the repo-root .gitignore.
    tmp_dir.gen({"file1": "contents", "dir": {}})
    data_dir = os.fspath(tmp_dir / "file1")
    entry, gitignore = scm._get_gitignore(data_dir)
    assert entry == "/file1"
    assert gitignore == os.fspath(tmp_dir / ".gitignore")
    data_dir = os.fspath(tmp_dir / "dir")
    entry, gitignore = scm._get_gitignore(data_dir)
    assert entry == "/dir"
    assert gitignore == os.fspath(tmp_dir / ".gitignore")
def test_get_gitignore_symlink(tmp_dir, scm):
    # The symlink itself (not its target) is what gets ignored.
    tmp_dir.gen({"dir": {"subdir": {"data": "contents"}}})
    link = os.fspath(tmp_dir / "link")
    target = os.fspath(tmp_dir / "dir" / "subdir" / "data")
    System.symlink(target, link)
    entry, gitignore = scm._get_gitignore(link)
    assert entry == "/link"
    assert gitignore == os.fspath(tmp_dir / ".gitignore")
def test_get_gitignore_subdir(tmp_dir, scm):
    # Paths inside a subdirectory use a .gitignore local to that directory.
    tmp_dir.gen({"dir1": {"file1": "cont", "dir2": {}}})
    data_dir = os.fspath(tmp_dir / "dir1" / "file1")
    entry, gitignore = scm._get_gitignore(data_dir)
    assert entry == "/file1"
    assert gitignore == os.fspath(tmp_dir / "dir1" / ".gitignore")
    data_dir = os.fspath(tmp_dir / "dir1" / "dir2")
    entry, gitignore = scm._get_gitignore(data_dir)
    assert entry == "/dir2"
    assert gitignore == os.fspath(tmp_dir / "dir1" / ".gitignore")
def test_gitignore_should_end_with_newline(tmp_dir, scm):
    # Every write through ignore() must leave a trailing newline so the
    # file stays valid for git and for subsequent appends.
    tmp_dir.gen({"foo": "foo", "bar": "bar"})
    foo = os.fspath(tmp_dir / "foo")
    bar = os.fspath(tmp_dir / "bar")
    gitignore = tmp_dir / ".gitignore"
    scm.ignore(foo)
    assert gitignore.read_text().endswith("\n")
    scm.ignore(bar)
    assert gitignore.read_text().endswith("\n")
def test_gitignore_should_append_newline_to_gitignore(tmp_dir, scm):
    # Appending to a pre-existing .gitignore that lacks a final newline
    # must not glue the new entry onto the last line.
    tmp_dir.gen({"foo": "foo", "bar": "bar"})
    bar_path = os.fspath(tmp_dir / "bar")
    gitignore = tmp_dir / ".gitignore"
    gitignore.write_text("/foo")
    assert not gitignore.read_text().endswith("\n")
    scm.ignore(bar_path)
    contents = gitignore.read_text()
    assert gitignore.read_text().endswith("\n")
    assert contents.splitlines() == ["/foo", "/bar"]
def test_git_detach_head(tmp_dir, scm):
    # Inside the context HEAD is detached at the given rev; on exit the
    # original branch ref is restored.
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    init_rev = scm.get_rev()
    with scm.detach_head() as rev:
        assert init_rev == rev
        assert init_rev == (tmp_dir / ".git" / "HEAD").read_text().strip()
    assert (
        "ref: refs/heads/master"
        == (tmp_dir / ".git" / "HEAD").read_text().strip()
    )
def test_git_stash_workspace(tmp_dir, scm):
    # Uncommitted changes are stashed inside the context and restored
    # when it exits.
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    tmp_dir.gen("file", "1")
    with scm.stash_workspace():
        assert not scm.is_dirty()
        assert "0" == (tmp_dir / "file").read_text()
    assert scm.is_dirty()
    assert "1" == (tmp_dir / "file").read_text()
@pytest.mark.parametrize(
    "ref, include_untracked",
    [
        (None, True),
        (None, False),
        ("refs/foo/stash", True),
        ("refs/foo/stash", False),
    ],
)
def test_git_stash_push(tmp_dir, scm, ref, include_untracked):
    # push() must reset the worktree, record the stash under the given ref
    # (or the default stash ref when None), honour include_untracked, and
    # restore both tracked and untracked changes on apply().
    from dvc.scm.git import Stash
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    tmp_dir.gen({"file": "1", "untracked": "0"})
    stash = Stash(scm, ref=ref)
    rev = stash.push(include_untracked=include_untracked)
    assert rev == scm.get_ref(stash.ref)
    assert "0" == (tmp_dir / "file").read_text()
    assert include_untracked != (tmp_dir / "untracked").exists()
    assert len(stash) == 1
    stash.apply(rev)
    assert "1" == (tmp_dir / "file").read_text()
    assert "0" == (tmp_dir / "untracked").read_text()
    # Both the ref file and its reflog must exist on disk.
    parts = list(stash.ref.split("/"))
    assert os.path.exists(os.path.join(os.fspath(tmp_dir), ".git", *parts))
    assert os.path.exists(
        os.path.join(os.fspath(tmp_dir), ".git", "logs", *parts)
    )
@pytest.mark.parametrize("ref", [None, "refs/foo/stash"])
def test_git_stash_drop(tmp_dir, scm, ref):
    # Dropping entry 1 (the older one) leaves the newer entry as the tip.
    from dvc.scm.git import Stash
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    tmp_dir.gen("file", "1")
    stash = Stash(scm, ref=ref)
    stash.push()
    tmp_dir.gen("file", "2")
    expected = stash.push()
    stash.drop(1)
    assert expected == scm.get_ref(stash.ref)
    assert len(stash) == 1
reason = """libgit2 stash_save() is flaky on linux when run inside pytest
https://github.com/iterative/dvc/pull/5286#issuecomment-792574294"""
@pytest.mark.parametrize(
    "ref",
    [
        pytest.param(
            None,
            marks=pytest.mark.xfail(
                sys.platform == "linux", raises=AssertionError, reason=reason
            ),
        ),
        "refs/foo/stash",
    ],
)
def test_git_stash_pop(tmp_dir, scm, ref):
    # pop() returns and removes the newest entry, leaving the older one
    # as the new tip, and restores the popped changes to the worktree.
    from dvc.scm.git import Stash
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    tmp_dir.gen("file", "1")
    stash = Stash(scm, ref=ref)
    first = stash.push()
    tmp_dir.gen("file", "2")
    second = stash.push()
    assert second == stash.pop()
    assert len(stash) == 1
    assert first == scm.get_ref(stash.ref)
    assert "2" == (tmp_dir / "file").read_text()
@pytest.mark.parametrize("ref", [None, "refs/foo/stash"])
def test_git_stash_clear(tmp_dir, scm, ref):
    # clear() must empty the stash and remove the backing ref file.
    from dvc.scm.git import Stash
    tmp_dir.scm_gen({"file": "0"}, commit="init")
    tmp_dir.gen("file", "1")
    stash = Stash(scm, ref=ref)
    stash.push()
    tmp_dir.gen("file", "2")
    stash.push()
    stash.clear()
    assert len(stash) == 0
    parts = list(stash.ref.split("/"))
    assert not os.path.exists(os.path.join(os.fspath(tmp_dir), ".git", *parts))
    # NOTE: some backends will completely remove reflog file on clear, some
    # will only truncate it, either case means an empty stash
    log_path = os.path.join(os.fspath(tmp_dir), ".git", "logs", *parts)
    assert (
        not os.path.exists(log_path)
        or not open(log_path, encoding="utf-8").read()
    )
@pytest.mark.needs_internet
@pytest.mark.parametrize("server", [pytest.lazy_fixture("git_ssh")])
def test_git_ssh(tmp_dir, scm, server):
    # Round-trip over SSH: push a commit to a bare remote, then fetch it
    # back into a fresh dulwich-initialized repo and verify the contents.
    from dulwich.repo import Repo as DulwichRepo
    from sshfs import SSHFileSystem
    from dvc.utils.fs import remove
    from tests.remotes.ssh import TEST_SSH_KEY_PATH, TEST_SSH_USER
    fs = SSHFileSystem(
        host=server.host,
        port=server.port,
        username=TEST_SSH_USER,
        client_keys=[TEST_SSH_KEY_PATH],
    )
    server._ssh.execute("git init --bare test-repo.git")
    url = f"ssh://{TEST_SSH_USER}@{server.host}:{server.port}/~/test-repo.git"
    tmp_dir.scm_gen("foo", "foo", commit="init")
    rev = scm.get_rev()
    scm.push_refspec(
        url,
        "refs/heads/master",
        "refs/heads/master",
        force=True,
        key_filename=TEST_SSH_KEY_PATH,
    )
    # The remote's master ref file must now contain the pushed rev.
    assert (
        rev.encode("ascii")
        == fs.open("test-repo.git/refs/heads/master").read().strip()
    )
    # Wipe the local history and working copy, then fetch everything back.
    remove(tmp_dir / ".git")
    remove(tmp_dir / "foo")
    DulwichRepo.init(str(tmp_dir))
    scm.fetch_refspecs(
        url,
        ["refs/heads/master"],
        force=True,
        key_filename=TEST_SSH_KEY_PATH,
    )
    assert rev == scm.get_ref("refs/heads/master")
    scm.checkout("master")
    assert "foo" == (tmp_dir / "foo").read_text()
| 29.060052 | 79 | 0.637556 | import os
import sys
import pytest
from git import Repo
from dvc.scm import SCM, Git, NoSCM
from dvc.scm.base import SCMError
from dvc.system import System
from tests.basic_env import TestGitSubmodule
from tests.utils import get_gitignore_content
def test_init_none(tmp_dir):
assert isinstance(SCM(os.fspath(tmp_dir), no_scm=True), NoSCM)
def test_init_git(tmp_dir):
Repo.init(os.fspath(tmp_dir))
assert isinstance(SCM(os.fspath(tmp_dir)), Git)
def test_init_no_git(tmp_dir):
with pytest.raises(SCMError):
SCM(os.fspath(tmp_dir))
def test_init_sub_dir(tmp_dir):
Repo.init(os.fspath(tmp_dir))
subdir = tmp_dir / "dir"
subdir.mkdir()
scm = SCM(os.fspath(subdir))
assert scm.root_dir == os.fspath(tmp_dir)
def test_commit(tmp_dir, scm):
tmp_dir.gen({"foo": "foo"})
scm.add(["foo"])
scm.commit("add")
assert "foo" in scm.gitpython.git.ls_files()
def test_is_tracked(tmp_dir, scm):
tmp_dir.gen({"foo": "foo", "тест": "проверка"})
scm.add(["foo", "тест"])
abs_foo = os.path.abspath("foo")
assert scm.is_tracked(abs_foo)
assert scm.is_tracked("foo")
assert scm.is_tracked("тест")
scm.commit("add")
assert scm.is_tracked(abs_foo)
assert scm.is_tracked("foo")
scm.gitpython.repo.index.remove(["foo"], working_tree=True)
assert not scm.is_tracked(abs_foo)
assert not scm.is_tracked("foo")
assert not scm.is_tracked("not-existing-file")
class TestSCMGitSubmodule(TestGitSubmodule):
def test_git_submodule(self):
self.assertIsInstance(SCM(os.curdir), Git)
def test_commit_in_submodule(self):
G = Git(self._root_dir)
G.add(["foo"])
G.commit("add")
self.assertTrue("foo" in self.git.git.ls_files())
def _count_gitignore_entries(line):
lines = get_gitignore_content()
return lines.count(line)
def test_ignore(tmp_dir, scm):
foo = os.fspath(tmp_dir / "foo")
target = "/foo"
scm.ignore(foo)
assert (tmp_dir / ".gitignore").is_file()
scm._reset()
assert _count_gitignore_entries(target) == 1
scm.ignore(foo)
assert (tmp_dir / ".gitignore").is_file()
scm._reset()
assert _count_gitignore_entries(target) == 1
scm.ignore_remove(foo)
assert not (tmp_dir / ".gitignore").exists()
def test_ignored(tmp_dir, scm):
tmp_dir.gen({"dir1": {"file1.jpg": "cont", "file2.txt": "cont"}})
tmp_dir.gen({".gitignore": "dir1/*.jpg"})
assert scm.is_ignored(tmp_dir / "dir1" / "file1.jpg")
assert not scm.is_ignored(tmp_dir / "dir1" / "file2.txt")
def test_ignored_dir_unignored_subdirs(tmp_dir, scm):
tmp_dir.gen({".gitignore": "data/**\n!data/**/\n!data/**/*.csv"})
scm.add([".gitignore"])
tmp_dir.gen(
{
os.path.join("data", "raw", "tracked.csv"): "cont",
os.path.join("data", "raw", "not_tracked.json"): "cont",
}
)
assert not scm.is_ignored(tmp_dir / "data" / "raw" / "tracked.csv")
assert scm.is_ignored(tmp_dir / "data" / "raw" / "not_tracked.json")
assert not scm.is_ignored(tmp_dir / "data" / "raw" / "non_existent.csv")
assert scm.is_ignored(tmp_dir / "data" / "raw" / "non_existent.json")
assert not scm.is_ignored(tmp_dir / "data" / "non_existent.csv")
assert scm.is_ignored(tmp_dir / "data" / "non_existent.json")
assert not scm.is_ignored(f"data{os.sep}")
# instead, we rely on the trailing separator to determine if handling a
# a directory - for consistency between existent and non-existent paths
assert scm.is_ignored(os.path.join("data", "raw"))
assert not scm.is_ignored(os.path.join("data", f"raw{os.sep}"))
assert scm.is_ignored(os.path.join("data", "non_existent"))
assert not scm.is_ignored(os.path.join("data", f"non_existent{os.sep}"))
def test_get_gitignore(tmp_dir, scm):
tmp_dir.gen({"file1": "contents", "dir": {}})
data_dir = os.fspath(tmp_dir / "file1")
entry, gitignore = scm._get_gitignore(data_dir)
assert entry == "/file1"
assert gitignore == os.fspath(tmp_dir / ".gitignore")
data_dir = os.fspath(tmp_dir / "dir")
entry, gitignore = scm._get_gitignore(data_dir)
assert entry == "/dir"
assert gitignore == os.fspath(tmp_dir / ".gitignore")
def test_get_gitignore_symlink(tmp_dir, scm):
tmp_dir.gen({"dir": {"subdir": {"data": "contents"}}})
link = os.fspath(tmp_dir / "link")
target = os.fspath(tmp_dir / "dir" / "subdir" / "data")
System.symlink(target, link)
entry, gitignore = scm._get_gitignore(link)
assert entry == "/link"
assert gitignore == os.fspath(tmp_dir / ".gitignore")
def test_get_gitignore_subdir(tmp_dir, scm):
tmp_dir.gen({"dir1": {"file1": "cont", "dir2": {}}})
data_dir = os.fspath(tmp_dir / "dir1" / "file1")
entry, gitignore = scm._get_gitignore(data_dir)
assert entry == "/file1"
assert gitignore == os.fspath(tmp_dir / "dir1" / ".gitignore")
data_dir = os.fspath(tmp_dir / "dir1" / "dir2")
entry, gitignore = scm._get_gitignore(data_dir)
assert entry == "/dir2"
assert gitignore == os.fspath(tmp_dir / "dir1" / ".gitignore")
def test_gitignore_should_end_with_newline(tmp_dir, scm):
tmp_dir.gen({"foo": "foo", "bar": "bar"})
foo = os.fspath(tmp_dir / "foo")
bar = os.fspath(tmp_dir / "bar")
gitignore = tmp_dir / ".gitignore"
scm.ignore(foo)
assert gitignore.read_text().endswith("\n")
scm.ignore(bar)
assert gitignore.read_text().endswith("\n")
def test_gitignore_should_append_newline_to_gitignore(tmp_dir, scm):
tmp_dir.gen({"foo": "foo", "bar": "bar"})
bar_path = os.fspath(tmp_dir / "bar")
gitignore = tmp_dir / ".gitignore"
gitignore.write_text("/foo")
assert not gitignore.read_text().endswith("\n")
scm.ignore(bar_path)
contents = gitignore.read_text()
assert gitignore.read_text().endswith("\n")
assert contents.splitlines() == ["/foo", "/bar"]
def test_git_detach_head(tmp_dir, scm):
tmp_dir.scm_gen({"file": "0"}, commit="init")
init_rev = scm.get_rev()
with scm.detach_head() as rev:
assert init_rev == rev
assert init_rev == (tmp_dir / ".git" / "HEAD").read_text().strip()
assert (
"ref: refs/heads/master"
== (tmp_dir / ".git" / "HEAD").read_text().strip()
)
def test_git_stash_workspace(tmp_dir, scm):
tmp_dir.scm_gen({"file": "0"}, commit="init")
tmp_dir.gen("file", "1")
with scm.stash_workspace():
assert not scm.is_dirty()
assert "0" == (tmp_dir / "file").read_text()
assert scm.is_dirty()
assert "1" == (tmp_dir / "file").read_text()
@pytest.mark.parametrize(
"ref, include_untracked",
[
(None, True),
(None, False),
("refs/foo/stash", True),
("refs/foo/stash", False),
],
)
def test_git_stash_push(tmp_dir, scm, ref, include_untracked):
from dvc.scm.git import Stash
tmp_dir.scm_gen({"file": "0"}, commit="init")
tmp_dir.gen({"file": "1", "untracked": "0"})
stash = Stash(scm, ref=ref)
rev = stash.push(include_untracked=include_untracked)
assert rev == scm.get_ref(stash.ref)
assert "0" == (tmp_dir / "file").read_text()
assert include_untracked != (tmp_dir / "untracked").exists()
assert len(stash) == 1
stash.apply(rev)
assert "1" == (tmp_dir / "file").read_text()
assert "0" == (tmp_dir / "untracked").read_text()
parts = list(stash.ref.split("/"))
assert os.path.exists(os.path.join(os.fspath(tmp_dir), ".git", *parts))
assert os.path.exists(
os.path.join(os.fspath(tmp_dir), ".git", "logs", *parts)
)
@pytest.mark.parametrize("ref", [None, "refs/foo/stash"])
def test_git_stash_drop(tmp_dir, scm, ref):
from dvc.scm.git import Stash
tmp_dir.scm_gen({"file": "0"}, commit="init")
tmp_dir.gen("file", "1")
stash = Stash(scm, ref=ref)
stash.push()
tmp_dir.gen("file", "2")
expected = stash.push()
stash.drop(1)
assert expected == scm.get_ref(stash.ref)
assert len(stash) == 1
reason = """libgit2 stash_save() is flaky on linux when run inside pytest
https://github.com/iterative/dvc/pull/5286#issuecomment-792574294"""
@pytest.mark.parametrize(
"ref",
[
pytest.param(
None,
marks=pytest.mark.xfail(
sys.platform == "linux", raises=AssertionError, reason=reason
),
),
"refs/foo/stash",
],
)
def test_git_stash_pop(tmp_dir, scm, ref):
from dvc.scm.git import Stash
tmp_dir.scm_gen({"file": "0"}, commit="init")
tmp_dir.gen("file", "1")
stash = Stash(scm, ref=ref)
first = stash.push()
tmp_dir.gen("file", "2")
second = stash.push()
assert second == stash.pop()
assert len(stash) == 1
assert first == scm.get_ref(stash.ref)
assert "2" == (tmp_dir / "file").read_text()
@pytest.mark.parametrize("ref", [None, "refs/foo/stash"])
def test_git_stash_clear(tmp_dir, scm, ref):
from dvc.scm.git import Stash
tmp_dir.scm_gen({"file": "0"}, commit="init")
tmp_dir.gen("file", "1")
stash = Stash(scm, ref=ref)
stash.push()
tmp_dir.gen("file", "2")
stash.push()
stash.clear()
assert len(stash) == 0
parts = list(stash.ref.split("/"))
assert not os.path.exists(os.path.join(os.fspath(tmp_dir), ".git", *parts))
# NOTE: some backends will completely remove reflog file on clear, some
# will only truncate it, either case means an empty stash
log_path = os.path.join(os.fspath(tmp_dir), ".git", "logs", *parts)
assert (
not os.path.exists(log_path)
or not open(log_path, encoding="utf-8").read()
)
@pytest.mark.needs_internet
@pytest.mark.parametrize("server", [pytest.lazy_fixture("git_ssh")])
def test_git_ssh(tmp_dir, scm, server):
from dulwich.repo import Repo as DulwichRepo
from sshfs import SSHFileSystem
from dvc.utils.fs import remove
from tests.remotes.ssh import TEST_SSH_KEY_PATH, TEST_SSH_USER
fs = SSHFileSystem(
host=server.host,
port=server.port,
username=TEST_SSH_USER,
client_keys=[TEST_SSH_KEY_PATH],
)
server._ssh.execute("git init --bare test-repo.git")
url = f"ssh://{TEST_SSH_USER}@{server.host}:{server.port}/~/test-repo.git"
tmp_dir.scm_gen("foo", "foo", commit="init")
rev = scm.get_rev()
scm.push_refspec(
url,
"refs/heads/master",
"refs/heads/master",
force=True,
key_filename=TEST_SSH_KEY_PATH,
)
assert (
rev.encode("ascii")
== fs.open("test-repo.git/refs/heads/master").read().strip()
)
remove(tmp_dir / ".git")
remove(tmp_dir / "foo")
DulwichRepo.init(str(tmp_dir))
scm.fetch_refspecs(
url,
["refs/heads/master"],
force=True,
key_filename=TEST_SSH_KEY_PATH,
)
assert rev == scm.get_ref("refs/heads/master")
scm.checkout("master")
assert "foo" == (tmp_dir / "foo").read_text()
| true | true |
1c382bdec7b3a619d5432493cd7bf74eed54be9e | 1,236 | py | Python | src/ikea_api/endpoints/search.py | RCRalph/ikea-api-client | 51c71bfc423d0b995f60add11837d7860d14f754 | [
"MIT"
] | null | null | null | src/ikea_api/endpoints/search.py | RCRalph/ikea-api-client | 51c71bfc423d0b995f60add11837d7860d14f754 | [
"MIT"
] | null | null | null | src/ikea_api/endpoints/search.py | RCRalph/ikea-api-client | 51c71bfc423d0b995f60add11837d7860d14f754 | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import Any, Literal
from ikea_api.abc import Endpoint, SessionInfo, endpoint
from ikea_api.base_ikea_api import BaseIkeaAPI
from ikea_api.error_handlers import handle_json_decode_error
SearchType = Literal["PRODUCT", "CONTENT", "PLANNER", "REFINED_SEARCHES", "ANSWER"]
class Search(BaseIkeaAPI):
def _get_session_info(self) -> SessionInfo:
url = f"https://sik.search.blue.cdtapps.com/{self._const.country}/{self._const.language}/search-result-page"
return SessionInfo(base_url=url, headers=self._extend_default_headers({}))
@endpoint(handlers=[handle_json_decode_error])
def search(
self,
query: str,
*,
limit: int = 24,
types: list[SearchType] = ["PRODUCT"],
) -> Endpoint[dict[str, Any]]:
params = {
"autocorrect": "true",
"subcategories-style": "tree-navigation",
"types": ",".join(types),
"q": query,
"size": limit,
"c": "sr", # API client: sr - search results, sb - search bar
"v": "20210322", # API version
}
response = yield self._RequestInfo("GET", params=params)
return response.json
| 34.333333 | 116 | 0.631877 | from __future__ import annotations
from typing import Any, Literal
from ikea_api.abc import Endpoint, SessionInfo, endpoint
from ikea_api.base_ikea_api import BaseIkeaAPI
from ikea_api.error_handlers import handle_json_decode_error
SearchType = Literal["PRODUCT", "CONTENT", "PLANNER", "REFINED_SEARCHES", "ANSWER"]
class Search(BaseIkeaAPI):
def _get_session_info(self) -> SessionInfo:
url = f"https://sik.search.blue.cdtapps.com/{self._const.country}/{self._const.language}/search-result-page"
return SessionInfo(base_url=url, headers=self._extend_default_headers({}))
@endpoint(handlers=[handle_json_decode_error])
def search(
self,
query: str,
*,
limit: int = 24,
types: list[SearchType] = ["PRODUCT"],
) -> Endpoint[dict[str, Any]]:
params = {
"autocorrect": "true",
"subcategories-style": "tree-navigation",
"types": ",".join(types),
"q": query,
"size": limit,
"c": "sr",
"v": "20210322",
}
response = yield self._RequestInfo("GET", params=params)
return response.json
| true | true |
1c382c4356470aefb4f62003845ae4e4c6777f19 | 1,608 | py | Python | CFM_main/reader.py | michaelleerilee/CommunityFirnModel | 312fc30b62b7e36a609660e5b10e3269eb090bae | [
"MIT"
] | null | null | null | CFM_main/reader.py | michaelleerilee/CommunityFirnModel | 312fc30b62b7e36a609660e5b10e3269eb090bae | [
"MIT"
] | null | null | null | CFM_main/reader.py | michaelleerilee/CommunityFirnModel | 312fc30b62b7e36a609660e5b10e3269eb090bae | [
"MIT"
] | null | null | null | #!usr/bin/env python
'''
Functions to read model inputs.
'''
import os
import numpy as np
# from string import join
from constants import *
import h5py
def read_input(filename, StartDate=None):
    '''
    Read a time-series forcing input (e.g. temperature, accumulation) from a
    csv file.

    The file may be laid out either as two columns (year, value; one row per
    time step) or as two rows (years on row 0, values on row 1); whichever
    dimension is longer is taken to be time.

    :param filename: path of the csv file, relative to the current working
        directory (absolute paths also work, since os.path.join keeps them)
    :param StartDate: if given, the returned (non-_full) series is trimmed to
        times >= StartDate
    :return input_data: vector of the field of interest (possibly trimmed)
    :return input_year: corresponding time vector in years (possibly trimmed)
    :return input_data_full: untrimmed copy of the data vector
    :return input_year_full: untrimmed copy of the time vector
    '''
    FID = os.path.join(os.getcwd(), filename)
    data = np.loadtxt(FID, delimiter=',')  # loadtxt is much faster than genfromtxt
    xx, yy = np.shape(data)
    # Longer dimension is time: column-oriented if more rows than columns.
    if xx > yy:
        input_year = data[:, 0]
        input_data = data[:, 1]
    else:
        input_year = data[0, :]
        input_data = data[1, :]
    # Keep untrimmed copies regardless of StartDate.
    input_year_full = input_year.copy()
    input_data_full = input_data.copy()
    if StartDate is not None:  # fixed: identity check, not '== None'
        StartInd = np.where(input_year >= StartDate)[0]
        input_year = input_year[StartInd]
        input_data = input_data[StartInd]
    return input_data, input_year, input_data_full, input_year_full
def read_init(folder, resultsFileName, varname):
    '''
    Read an initial-condition field (depth, age, density, temperature, ...)
    from a previous results file so the model can run without spinup.

    :param folder: directory containing the results file
    :param resultsFileName: name of the HDF5 results file
    :param varname: name of the dataset to read from the file
    :return init_value: array of the stored values
    '''
    # Context manager guarantees the HDF5 handle is closed even if the
    # dataset lookup raises (the previous version leaked it on error).
    with h5py.File(os.path.join(folder, resultsFileName), 'r') as f5:
        init_value = f5[varname][:]
    return init_value
| 26.8 | 114 | 0.659204 |
import os
import numpy as np
from constants import *
import h5py
def read_input(filename,StartDate=None):
spot = os.getcwd()
FID = os.path.join(spot, filename)
data = np.loadtxt(FID, delimiter=',')
xx,yy = np.shape(data)
if xx>yy:
input_year = data[:, 0]
input_data = data[:, 1]
else:
input_year = data[0, :]
input_data = data[1, :]
input_year_full = input_year.copy()
input_data_full = input_data.copy()
if StartDate==None:
pass
else:
StartInd = np.where(input_year>=StartDate)[0]
input_year = input_year[StartInd]
input_data = input_data[StartInd]
return input_data, input_year, input_data_full, input_year_full
def read_init(folder, resultsFileName, varname):
f5 = h5py.File(os.path.join(folder, resultsFileName),'r')
init_value = f5[varname][:]
f5.close()
return init_value
| true | true |
1c382cadea0a62ded77480b47a1fb27b85d96974 | 12,384 | py | Python | third_party/html5lib/tests/test_treewalkers.py | dewitt/appengine-unshorten | 1bb9e277eb67cefbf176b104d3297da5402e67b7 | [
"Apache-2.0"
] | 1 | 2020-09-03T23:55:06.000Z | 2020-09-03T23:55:06.000Z | third_party/html5lib/tests/test_treewalkers.py | dewitt/appengine-unshorten | 1bb9e277eb67cefbf176b104d3297da5402e67b7 | [
"Apache-2.0"
] | null | null | null | third_party/html5lib/tests/test_treewalkers.py | dewitt/appengine-unshorten | 1bb9e277eb67cefbf176b104d3297da5402e67b7 | [
"Apache-2.0"
] | null | null | null | import os
import sys
import StringIO
import unittest
import warnings
warnings.simplefilter("error")
from support import html5lib_test_files, TestData, convertExpected
from html5lib import html5parser, treewalkers, treebuilders, constants
from html5lib.filters.lint import Filter as LintFilter, LintError
def PullDOMAdapter(node):
    """Recursively walk a DOM (sub)tree, yielding PullDOM-style
    (event, node) pairs.

    Document and fragment nodes are transparent: only their children are
    emitted.  DOCTYPE nodes are rejected, since PullDOM has no event for
    them.
    """
    from xml.dom import Node
    from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, COMMENT, CHARACTERS

    kind = node.nodeType
    if kind in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
        # Containers produce no event of their own; recurse into children.
        for child in node.childNodes:
            for event in PullDOMAdapter(child):
                yield event
    elif kind == Node.DOCUMENT_TYPE_NODE:
        raise NotImplementedError("DOCTYPE nodes are not supported by PullDOM")
    elif kind == Node.COMMENT_NODE:
        yield COMMENT, node
    elif kind in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
        yield CHARACTERS, node
    elif kind == Node.ELEMENT_NODE:
        # Elements bracket their children with start/end events.
        yield START_ELEMENT, node
        for child in node.childNodes:
            for event in PullDOMAdapter(child):
                yield event
        yield END_ELEMENT, node
    else:
        raise NotImplementedError("Node type not supported: " + str(node.nodeType))
# Map of tree implementation name -> {"builder", "walker", optional "adapter"}.
# Additional implementations (etree variants, lxml, BeautifulSoup, pxdom,
# genshi) are appended below inside try/except ImportError guards.
treeTypes = {
    "simpletree": {"builder": treebuilders.getTreeBuilder("simpletree"),
                   "walker": treewalkers.getTreeWalker("simpletree")},
    "DOM": {"builder": treebuilders.getTreeBuilder("dom"),
            "walker": treewalkers.getTreeWalker("dom")},
    # PullDOM reuses the DOM builder; PullDOMAdapter converts the DOM tree
    # into the event stream the pulldom walker expects.
    "PullDOM": {"builder": treebuilders.getTreeBuilder("dom"),
                "adapter": PullDOMAdapter,
                "walker": treewalkers.getTreeWalker("pulldom")},
}
#Try whatever etree implementations are available from a list that are
#"supposed" to work
try:
import xml.etree.ElementTree as ElementTree
treeTypes['ElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
try:
import elementtree.ElementTree as ElementTree
treeTypes['ElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
pass
try:
import xml.etree.cElementTree as ElementTree
treeTypes['cElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
try:
import cElementTree as ElementTree
treeTypes['cElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
pass
try:
import lxml.etree as ElementTree
# treeTypes['lxml_as_etree'] = \
# {"builder": treebuilders.getTreeBuilder("etree", ElementTree),
# "walker": treewalkers.getTreeWalker("etree", ElementTree)}
treeTypes['lxml_native'] = \
{"builder": treebuilders.getTreeBuilder("lxml"),
"walker": treewalkers.getTreeWalker("lxml")}
except ImportError:
pass
try:
import BeautifulSoup
treeTypes["beautifulsoup"] = \
{"builder": treebuilders.getTreeBuilder("beautifulsoup"),
"walker": treewalkers.getTreeWalker("beautifulsoup")}
except ImportError:
pass
#Try whatever etree implementations are available from a list that are
#"supposed" to work
try:
import pxdom
treeTypes['pxdom'] = \
{"builder": treebuilders.getTreeBuilder("dom", pxdom),
"walker": treewalkers.getTreeWalker("dom")}
except ImportError:
pass
try:
from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
    def GenshiAdapter(tree):
        """Convert html5lib treewalker tokens into Genshi stream events.

        Runs of character tokens are buffered in ``text`` and flushed as a
        single TEXT event; source positions are unknown, so every event uses
        the placeholder (None, -1, -1).
        """
        text = None
        for token in treewalkers.getTreeWalker("simpletree")(tree):
            type = token["type"]
            if type in ("Characters", "SpaceCharacters"):
                # Accumulate adjacent character tokens into one TEXT event.
                if text is None:
                    text = token["data"]
                else:
                    text += token["data"]
            elif text is not None:
                # Non-character token: flush the pending text first.
                yield TEXT, text, (None, -1, -1)
                text = None
            if type in ("StartTag", "EmptyTag"):
                if token["namespace"]:
                    name = u"{%s}%s" % (token["namespace"], token["name"])
                else:
                    name = token["name"]
                yield (START,
                       (QName(name),
                        Attrs([(QName(attr),value) for attr,value in token["data"]])),
                       (None, -1, -1))
                if type == "EmptyTag":
                    # Empty tag = start immediately followed by end.
                    type = "EndTag"
            if type == "EndTag":
                yield END, QName(token["name"]), (None, -1, -1)
            elif type == "Comment":
                yield COMMENT, token["data"], (None, -1, -1)
            elif type == "Doctype":
                yield DOCTYPE, (token["name"], token["publicId"],
                                token["systemId"]), (None, -1, -1)
            else:
                pass # FIXME: What to do?
        if text is not None:
            # Flush any trailing character data.
            yield TEXT, text, (None, -1, -1)
#treeTypes["genshi"] = \
# {"builder": treebuilders.getTreeBuilder("simpletree"),
# "adapter": GenshiAdapter,
# "walker": treewalkers.getTreeWalker("genshi")}
except ImportError:
pass
def concatenateCharacterTokens(tokens):
    """Yield *tokens* with adjacent Characters/SpaceCharacters tokens merged
    into a single Characters token (the merged token is always typed
    'Characters', regardless of the types it was built from)."""
    pending = None
    for token in tokens:
        if token["type"] in ("Characters", "SpaceCharacters"):
            if pending is None:
                # Start a new merged character token.
                pending = {"type": "Characters", "data": token["data"]}
            else:
                pending["data"] += token["data"]
            continue
        # Non-character token: flush any buffered characters first.
        if pending is not None:
            yield pending
            pending = None
        yield token
    if pending is not None:
        yield pending
def convertTokens(tokens):
    """Serialize a walker token stream into the indented text format used by
    the tree-construction expectation files.

    Names in non-HTML namespaces are rendered as "<prefix> <local>";
    attributes are sorted by name; character tokens are coalesced first.
    """
    output = []
    indent = 0
    for token in concatenateCharacterTokens(tokens):
        type = token["type"]
        if type in ("StartTag", "EmptyTag"):
            # Non-HTML namespaces get an explicit prefix in the dump.
            if (token["namespace"] and
                token["namespace"] != constants.namespaces["html"]):
                if token["namespace"] in constants.prefixes:
                    name = constants.prefixes[token["namespace"]]
                else:
                    name = token["namespace"]
                name += u" " + token["name"]
            else:
                name = token["name"]
            output.append(u"%s<%s>" % (" "*indent, name))
            indent += 2
            attrs = token["data"]
            if attrs:
                # NOTE: Python 2 cmp-style sort (cmp was removed in Python 3).
                attrs.sort(lambda a,b: cmp(a["name"], b["name"]))
                for attr in attrs:
                    if attr["namespace"]:
                        if attr["namespace"] in constants.prefixes:
                            name = constants.prefixes[attr["namespace"]]
                        else:
                            name = attr["namespace"]
                        name += u" " + attr["name"]
                    else:
                        name = attr["name"]
                    output.append(u"%s%s=\"%s\"" % (" "*indent, name, attr["value"]))
            if type == "EmptyTag":
                # Empty tags contribute no children; undo the indent bump.
                indent -= 2
        elif type == "EndTag":
            indent -= 2
        elif type == "Comment":
            output.append("%s<!-- %s -->" % (" "*indent, token["data"]))
        elif type == "Doctype":
            # Render the three doctype shapes: public+system, system-only, bare.
            if token["name"]:
                if token["publicId"]:
                    output.append("""%s<!DOCTYPE %s "%s" "%s">"""%
                                  (" "*indent, token["name"],
                                   token["publicId"],
                                   token["systemId"] and token["systemId"] or ""))
                elif token["systemId"]:
                    output.append("""%s<!DOCTYPE %s "" "%s">"""%
                                  (" "*indent, token["name"],
                                   token["systemId"]))
                else:
                    output.append("%s<!DOCTYPE %s>"%(" "*indent,
                                                     token["name"]))
            else:
                output.append("%s<!DOCTYPE >" % (" "*indent,))
        elif type in ("Characters", "SpaceCharacters"):
            output.append("%s\"%s\"" % (" "*indent, token["data"]))
        else:
            pass # TODO: what to do with errors?
    return u"\n".join(output)
import re
attrlist = re.compile(r"^(\s+)\w+=.*(\n\1\w+=.*)+",re.M)
def sortattrs(x):
    """re.sub callback: return the matched block of attribute lines with the
    lines sorted lexicographically (so attribute order never affects diffs)."""
    return "\n".join(sorted(x.group(0).split("\n")))
class TestCase(unittest.TestCase):
    """Dynamically populated container: buildTestSuite attaches one test_*
    method per (tree-construction test, tree implementation)."""

    def runTest(self, innerHTML, input, expected, errors, treeClass):
        # Parse the input (as a fragment when innerHTML names a context
        # element), walk the resulting tree, and compare the serialized token
        # stream against the expected document dump.
        try:
            p = html5parser.HTMLParser(tree = treeClass["builder"])
            if innerHTML:
                document = p.parseFragment(StringIO.StringIO(input), innerHTML)
            else:
                document = p.parse(StringIO.StringIO(input))
        except constants.DataLossWarning:
            #Ignore testcases we know we don't pass
            return
        # Some implementations need their tree adapted before walking
        # (e.g. PullDOM); default adapter is the identity.
        document = treeClass.get("adapter", lambda x: x)(document)
        try:
            output = convertTokens(treeClass["walker"](document))
            # Normalize attribute ordering on both sides before comparing.
            output = attrlist.sub(sortattrs, output)
            expected = attrlist.sub(sortattrs, convertExpected(expected))
            self.assertEquals(expected, output, "\n".join([
                "", "Input:", input,
                "", "Expected:", expected,
                "", "Received:", output
                ]))
        except NotImplementedError:
            pass # Amnesty for those that confess...
class TokenTestCase(unittest.TestCase):
    """Cross-implementation smoke test: every registered walker must emit
    the same token stream for one small fixed document."""

    def test_all_tokens(self):
        # Expected stream for "<html><head></head><body>a<div>b</div>c".
        expected = [
            {'data': [], 'type': 'StartTag', 'name': u'html'},
            {'data': [], 'type': 'StartTag', 'name': u'head'},
            {'data': [], 'type': 'EndTag', 'name': u'head'},
            {'data': [], 'type': 'StartTag', 'name': u'body'},
            {'data': u'a', 'type': 'Characters'},
            {'data': [], 'type': 'StartTag', 'name': u'div'},
            {'data': u'b', 'type': 'Characters'},
            {'data': [], 'type': 'EndTag', 'name': u'div'},
            {'data': u'c', 'type': 'Characters'},
            {'data': [], 'type': 'EndTag', 'name': u'body'},
            {'data': [], 'type': 'EndTag', 'name': u'html'}
            ]
        # NOTE: Python 2 API (dict.iteritems).
        for treeName, treeCls in treeTypes.iteritems():
            p = html5parser.HTMLParser(tree = treeCls["builder"])
            document = p.parse("<html><head></head><body>a<div>b</div>c</body></html>")
            document = treeCls.get("adapter", lambda x: x)(document)
            output = treeCls["walker"](document)
            for expectedToken, outputToken in zip(expected, output):
                self.assertEquals(expectedToken, outputToken)
def buildTestSuite():
    """Attach one test method to TestCase per (tree implementation,
    tree-construction data file, test index) and return the loaded suite."""
    sys.stdout.write('Testing tree walkers '+ " ".join(treeTypes.keys()) + "\n")
    for treeName, treeCls in treeTypes.iteritems():
        files = html5lib_test_files('tree-construction')
        for filename in files:
            testName = os.path.basename(filename).replace(".dat","")
            tests = TestData(filename, "data")
            for index, test in enumerate(tests):
                (input, errors,
                 innerHTML, expected) = [test[key] for key in ("data", "errors",
                                                               "document-fragment",
                                                               "document")]
                errors = errors.split("\n")
                # Default arguments freeze the current loop values into the
                # closure (workaround for Python's late-binding closures).
                def testFunc(self, innerHTML=innerHTML, input=input,
                             expected=expected, errors=errors, treeCls=treeCls):
                    self.runTest(innerHTML, input, expected, errors, treeCls)
                setattr(TestCase, "test_%s_%d_%s" % (testName,index+1,treeName),
                        testFunc)
    return unittest.TestLoader().loadTestsFromTestCase(TestCase)
def main():
    """Build the dynamic test suite, then hand control to unittest."""
    buildTestSuite()
    unittest.main()

if __name__ == "__main__":
    main()
| 38.222222 | 87 | 0.537791 | import os
import sys
import StringIO
import unittest
import warnings
warnings.simplefilter("error")
from support import html5lib_test_files, TestData, convertExpected
from html5lib import html5parser, treewalkers, treebuilders, constants
from html5lib.filters.lint import Filter as LintFilter, LintError
def PullDOMAdapter(node):
from xml.dom import Node
from xml.dom.pulldom import START_ELEMENT, END_ELEMENT, COMMENT, CHARACTERS
if node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
for childNode in node.childNodes:
for event in PullDOMAdapter(childNode):
yield event
elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
raise NotImplementedError("DOCTYPE nodes are not supported by PullDOM")
elif node.nodeType == Node.COMMENT_NODE:
yield COMMENT, node
elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
yield CHARACTERS, node
elif node.nodeType == Node.ELEMENT_NODE:
yield START_ELEMENT, node
for childNode in node.childNodes:
for event in PullDOMAdapter(childNode):
yield event
yield END_ELEMENT, node
else:
raise NotImplementedError("Node type not supported: " + str(node.nodeType))
treeTypes = {
"simpletree": {"builder": treebuilders.getTreeBuilder("simpletree"),
"walker": treewalkers.getTreeWalker("simpletree")},
"DOM": {"builder": treebuilders.getTreeBuilder("dom"),
"walker": treewalkers.getTreeWalker("dom")},
"PullDOM": {"builder": treebuilders.getTreeBuilder("dom"),
"adapter": PullDOMAdapter,
"walker": treewalkers.getTreeWalker("pulldom")},
}
try:
import xml.etree.ElementTree as ElementTree
treeTypes['ElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
try:
import elementtree.ElementTree as ElementTree
treeTypes['ElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
pass
try:
import xml.etree.cElementTree as ElementTree
treeTypes['cElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
try:
import cElementTree as ElementTree
treeTypes['cElementTree'] = \
{"builder": treebuilders.getTreeBuilder("etree", ElementTree),
"walker": treewalkers.getTreeWalker("etree", ElementTree)}
except ImportError:
pass
try:
import lxml.etree as ElementTree
treeTypes['lxml_native'] = \
{"builder": treebuilders.getTreeBuilder("lxml"),
"walker": treewalkers.getTreeWalker("lxml")}
except ImportError:
pass
try:
import BeautifulSoup
treeTypes["beautifulsoup"] = \
{"builder": treebuilders.getTreeBuilder("beautifulsoup"),
"walker": treewalkers.getTreeWalker("beautifulsoup")}
except ImportError:
pass
try:
import pxdom
treeTypes['pxdom'] = \
{"builder": treebuilders.getTreeBuilder("dom", pxdom),
"walker": treewalkers.getTreeWalker("dom")}
except ImportError:
pass
try:
from genshi.core import QName, Attrs
from genshi.core import START, END, TEXT, COMMENT, DOCTYPE
def GenshiAdapter(tree):
text = None
for token in treewalkers.getTreeWalker("simpletree")(tree):
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
if text is None:
text = token["data"]
else:
text += token["data"]
elif text is not None:
yield TEXT, text, (None, -1, -1)
text = None
if type in ("StartTag", "EmptyTag"):
if token["namespace"]:
name = u"{%s}%s" % (token["namespace"], token["name"])
else:
name = token["name"]
yield (START,
(QName(name),
Attrs([(QName(attr),value) for attr,value in token["data"]])),
(None, -1, -1))
if type == "EmptyTag":
type = "EndTag"
if type == "EndTag":
yield END, QName(token["name"]), (None, -1, -1)
elif type == "Comment":
yield COMMENT, token["data"], (None, -1, -1)
elif type == "Doctype":
yield DOCTYPE, (token["name"], token["publicId"],
token["systemId"]), (None, -1, -1)
else:
pass
if text is not None:
yield TEXT, text, (None, -1, -1)
except ImportError:
pass
def concatenateCharacterTokens(tokens):
charactersToken = None
for token in tokens:
type = token["type"]
if type in ("Characters", "SpaceCharacters"):
if charactersToken is None:
charactersToken = {"type": "Characters", "data": token["data"]}
else:
charactersToken["data"] += token["data"]
else:
if charactersToken is not None:
yield charactersToken
charactersToken = None
yield token
if charactersToken is not None:
yield charactersToken
def convertTokens(tokens):
output = []
indent = 0
for token in concatenateCharacterTokens(tokens):
type = token["type"]
if type in ("StartTag", "EmptyTag"):
if (token["namespace"] and
token["namespace"] != constants.namespaces["html"]):
if token["namespace"] in constants.prefixes:
name = constants.prefixes[token["namespace"]]
else:
name = token["namespace"]
name += u" " + token["name"]
else:
name = token["name"]
output.append(u"%s<%s>" % (" "*indent, name))
indent += 2
attrs = token["data"]
if attrs:
attrs.sort(lambda a,b: cmp(a["name"], b["name"]))
for attr in attrs:
if attr["namespace"]:
if attr["namespace"] in constants.prefixes:
name = constants.prefixes[attr["namespace"]]
else:
name = attr["namespace"]
name += u" " + attr["name"]
else:
name = attr["name"]
output.append(u"%s%s=\"%s\"" % (" "*indent, name, attr["value"]))
if type == "EmptyTag":
indent -= 2
elif type == "EndTag":
indent -= 2
elif type == "Comment":
output.append("%s<!-- %s -->" % (" "*indent, token["data"]))
elif type == "Doctype":
if token["name"]:
if token["publicId"]:
output.append("""%s<!DOCTYPE %s "%s" "%s">"""%
(" "*indent, token["name"],
token["publicId"],
token["systemId"] and token["systemId"] or ""))
elif token["systemId"]:
output.append("""%s<!DOCTYPE %s "" "%s">"""%
(" "*indent, token["name"],
token["systemId"]))
else:
output.append("%s<!DOCTYPE %s>"%(" "*indent,
token["name"]))
else:
output.append("%s<!DOCTYPE >" % (" "*indent,))
elif type in ("Characters", "SpaceCharacters"):
output.append("%s\"%s\"" % (" "*indent, token["data"]))
else:
pass
return u"\n".join(output)
import re
attrlist = re.compile(r"^(\s+)\w+=.*(\n\1\w+=.*)+",re.M)
def sortattrs(x):
lines = x.group(0).split("\n")
lines.sort()
return "\n".join(lines)
class TestCase(unittest.TestCase):
def runTest(self, innerHTML, input, expected, errors, treeClass):
try:
p = html5parser.HTMLParser(tree = treeClass["builder"])
if innerHTML:
document = p.parseFragment(StringIO.StringIO(input), innerHTML)
else:
document = p.parse(StringIO.StringIO(input))
except constants.DataLossWarning:
return
document = treeClass.get("adapter", lambda x: x)(document)
try:
output = convertTokens(treeClass["walker"](document))
output = attrlist.sub(sortattrs, output)
expected = attrlist.sub(sortattrs, convertExpected(expected))
self.assertEquals(expected, output, "\n".join([
"", "Input:", input,
"", "Expected:", expected,
"", "Received:", output
]))
except NotImplementedError:
pass # Amnesty for those that confess...
class TokenTestCase(unittest.TestCase):
def test_all_tokens(self):
expected = [
{'data': [], 'type': 'StartTag', 'name': u'html'},
{'data': [], 'type': 'StartTag', 'name': u'head'},
{'data': [], 'type': 'EndTag', 'name': u'head'},
{'data': [], 'type': 'StartTag', 'name': u'body'},
{'data': u'a', 'type': 'Characters'},
{'data': [], 'type': 'StartTag', 'name': u'div'},
{'data': u'b', 'type': 'Characters'},
{'data': [], 'type': 'EndTag', 'name': u'div'},
{'data': u'c', 'type': 'Characters'},
{'data': [], 'type': 'EndTag', 'name': u'body'},
{'data': [], 'type': 'EndTag', 'name': u'html'}
]
for treeName, treeCls in treeTypes.iteritems():
p = html5parser.HTMLParser(tree = treeCls["builder"])
document = p.parse("<html><head></head><body>a<div>b</div>c</body></html>")
document = treeCls.get("adapter", lambda x: x)(document)
output = treeCls["walker"](document)
for expectedToken, outputToken in zip(expected, output):
self.assertEquals(expectedToken, outputToken)
def buildTestSuite():
sys.stdout.write('Testing tree walkers '+ " ".join(treeTypes.keys()) + "\n")
for treeName, treeCls in treeTypes.iteritems():
files = html5lib_test_files('tree-construction')
for filename in files:
testName = os.path.basename(filename).replace(".dat","")
tests = TestData(filename, "data")
for index, test in enumerate(tests):
(input, errors,
innerHTML, expected) = [test[key] for key in ("data", "errors",
"document-fragment",
"document")]
errors = errors.split("\n")
def testFunc(self, innerHTML=innerHTML, input=input,
expected=expected, errors=errors, treeCls=treeCls):
self.runTest(innerHTML, input, expected, errors, treeCls)
setattr(TestCase, "test_%s_%d_%s" % (testName,index+1,treeName),
testFunc)
return unittest.TestLoader().loadTestsFromTestCase(TestCase)
def main():
buildTestSuite()
unittest.main()
if __name__ == "__main__":
main()
| true | true |
1c382cbe929885479c5aacb20d6f2814d53176b1 | 10,360 | py | Python | rnn-gru-random/train.py | manashpratim/Classification-of-Song-Attributes-using-Lyrics | 801b7d45bd98f3edbb4219d7e947a1bd20ec206c | [
"MIT"
] | 1 | 2020-01-17T03:05:48.000Z | 2020-01-17T03:05:48.000Z | rnn-gru-random/train.py | manashpratim/Classification-of-Song-Attributes-using-Lyrics | 801b7d45bd98f3edbb4219d7e947a1bd20ec206c | [
"MIT"
] | null | null | null | rnn-gru-random/train.py | manashpratim/Classification-of-Song-Attributes-using-Lyrics | 801b7d45bd98f3edbb4219d7e947a1bd20ec206c | [
"MIT"
] | null | null | null | import tensorflow as tf
import numpy as np
import os,sys
import datetime
import time
from rnn import RNN
import data_helpers
# Parameters
# ==================================================
# Command-line dataset id; also used to tag log and checkpoint file names.
did = sys.argv[1]
# Plain-text training log that mirrors most console output.
# NOTE(review): never explicitly closed; relies on interpreter exit to flush.
f1 = open("log_{}".format(did),"w")

# Data loading params
tf.flags.DEFINE_string("pos_dir", "data/rt-polaritydata/rt-polarity.pos", "Path of positive data")
tf.flags.DEFINE_string("neg_dir", "data/rt-polaritydata/rt-polarity.neg", "Path of negative data")
tf.flags.DEFINE_float("dev_sample_percentage", .1, "Percentage of the training data to use for validation")

# Model Hyperparameters
tf.flags.DEFINE_string("cell_type", "lstm", "Type of rnn cell. Choose 'vanilla' or 'lstm' or 'gru' (Default: vanilla)")
tf.flags.DEFINE_string("word2vec", None, "Word2vec file with pre-trained embeddings")
tf.flags.DEFINE_integer("embedding_dim", 300, "Dimensionality of character embedding (Default: 300)")
tf.flags.DEFINE_integer("hidden_size", 64, "Dimensionality of character embedding (Default: 128)")
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (Default: 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda", 3.0, "L2 regularization lambda (Default: 3.0)")

# Training parameters
tf.flags.DEFINE_integer("batch_size", 60, "Batch Size (Default: 64)")
tf.flags.DEFINE_integer("num_epochs", 30, "Number of training epochs (Default: 100)")
tf.flags.DEFINE_integer("display_every", 50, "Number of iterations to display training info.")
tf.flags.DEFINE_integer("evaluate_every", 50, "Evaluate model on dev set after this many steps")
tf.flags.DEFINE_integer("checkpoint_every", 1000, "Save model after this many steps")
tf.flags.DEFINE_integer("num_checkpoints", 11, "Number of checkpoints to store")
tf.flags.DEFINE_float("learning_rate", 1e-3, "Which learning rate to start with. (Default: 1e-3)")

# Misc Parameters
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")

FLAGS = tf.flags.FLAGS
# NOTE(review): FLAGS._parse_flags() and FLAGS.__flags are private TF1 APIs
# that were removed in later TF 1.x releases -- confirm the pinned TF version.
FLAGS._parse_flags()
print("\nParameters:")
f1.write("\nParameters:\n")
# Echo every flag value to stdout and to the log file.
for attr, value in sorted(FLAGS.__flags.items()):
    print("{} = {}".format(attr.upper(), value))
    f1.write("{} = {}\n".format(attr.upper(), value))
print("")
def train(x_train, y_train, vocabulary, x_dev, y_dev,cell):
    """Train one RNN classifier of the given cell type and checkpoint it.

    x_train/y_train, x_dev/y_dev: pre-vectorized train/dev splits;
    vocabulary: token mapping, used here only for its size;
    cell: rnn cell type string ('vanilla' / 'lstm' / 'gru').
    Writes checkpoints under models/ and progress lines to the module-level
    log file f1.
    """
    # (The original inline data loading / shuffling / train-dev splitting was
    # moved out to data_helpers.preprocess; the commented-out version that
    # lived here has been removed.)
    print("*************************************************"+cell+"****************************************************")
    with tf.Graph().as_default():
        # Cap GPU memory so several runs can share one card.
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.40)
        session_conf = tf.ConfigProto(
            allow_soft_placement=FLAGS.allow_soft_placement,
            log_device_placement=FLAGS.log_device_placement,
            gpu_options=gpu_options)
        sess = tf.Session(config=session_conf)
        with sess.as_default():
            rnn = RNN(
                sequence_length=x_train.shape[1],
                num_classes=y_train.shape[1],
                vocab_size=len(vocabulary),
                embedding_size=FLAGS.embedding_dim,
                cell_type=cell,
                hidden_size=FLAGS.hidden_size,
                l2_reg_lambda=FLAGS.l2_reg_lambda
            )

            # Define Training procedure
            global_step = tf.Variable(0, name="global_step", trainable=False)
            train_op = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(rnn.loss, global_step=global_step)

            # Summary ops are created but never written anywhere (no
            # FileWriter is set up in this version).
            loss_summary = tf.summary.scalar("loss", rnn.loss)
            acc_summary = tf.summary.scalar("accuracy", rnn.accuracy)

            # Checkpoint directory. Tensorflow assumes this directory already
            # exists so we need to create it.
            checkpoint_dir = "models/"
            checkpoint_prefix = os.path.join(checkpoint_dir, "model_{}_{}".format(cell,did))
            if not os.path.exists(checkpoint_dir):
                os.makedirs(checkpoint_dir)
            saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.num_checkpoints)

            # Initialize all variables
            sess.run(tf.global_variables_initializer())

            # Pre-trained word2vec
            # NOTE(review): this branch references text_vocab_processor, which
            # is not defined anywhere in this file -- passing --word2vec will
            # raise NameError. Presumably leftover from the original version
            # that built the vocabulary inline; verify before enabling.
            if FLAGS.word2vec:
                # initial matrix with random uniform
                initW = np.random.uniform(-0.25, 0.25, (len(text_vocab_processor.vocabulary_), FLAGS.embedding_dim))
                # load any vectors from the word2vec
                print("Load word2vec file {0}".format(FLAGS.word2vec))
                with open(FLAGS.word2vec, "rb") as f:
                    header = f.readline()
                    vocab_size, layer1_size = map(int, header.split())
                    binary_len = np.dtype('float32').itemsize * layer1_size
                    for line in range(vocab_size):
                        # Read one space-terminated word from the binary file.
                        word = []
                        while True:
                            ch = f.read(1).decode('latin-1')
                            if ch == ' ':
                                word = ''.join(word)
                                break
                            if ch != '\n':
                                word.append(ch)
                        idx = text_vocab_processor.vocabulary_.get(word)
                        if idx != 0:
                            initW[idx] = np.fromstring(f.read(binary_len), dtype='float32')
                        else:
                            # Word not in vocabulary: skip its vector bytes.
                            f.read(binary_len)
                sess.run(rnn.W_text.assign(initW))
                print("Success to load pre-trained word2vec model!\n")

            # Generate batches
            batches = data_helpers.batch_iter(
                list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)
            # Training loop. For each batch...
            for batch in batches:
                x_batch, y_batch = zip(*batch)
                # Train
                feed_dict = {
                    rnn.input_text: x_batch,
                    rnn.input_y: y_batch,
                    rnn.dropout_keep_prob: FLAGS.dropout_keep_prob
                }
                _, step, loss, accuracy = sess.run(
                    [train_op, global_step, rnn.loss, rnn.accuracy], feed_dict)

                # Training log display
                if step % FLAGS.display_every == 0:
                    time_str = datetime.datetime.now().isoformat()
                    print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
                    f1.write("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))

                # Evaluation on the full dev set (dropout disabled: keep_prob 1.0)
                if step % FLAGS.evaluate_every == 0:
                    f1.write("\nEvaluation:\n")
                    print("\nEvaluation:")
                    feed_dict_dev = {
                        rnn.input_text: x_dev,
                        rnn.input_y: y_dev,
                        rnn.dropout_keep_prob: 1.0
                    }
                    # Re-binds step/loss/accuracy with dev-set values.
                    step, loss, accuracy = sess.run(
                        [global_step, rnn.loss, rnn.accuracy], feed_dict_dev)
                    time_str = datetime.datetime.now().isoformat()
                    print("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))
                    f1.write("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))

                # Model checkpoint
                if step % FLAGS.checkpoint_every == 0:
                    path = saver.save(sess, checkpoint_prefix, global_step=step)
                    print("Saved model checkpoint to {}\n".format(path))
                    f1.write("Saved model checkpoint to {}\n".format(path))

            # Final checkpoint after the last batch.
            path = saver.save(sess, "models/model-final_{}_{}".format(cell,did))
            print("Saved model checkpoint to {}\n".format(path))
            f1.write("Saved model checkpoint to {}\n".format(path))
def main(_):
    """tf.app.run entry point: vectorize the dataset once, then train a
    model for each configured cell type."""
    x_train, y_train, vocabulary, x_dev, y_dev = data_helpers.preprocess(did)
    for cell_type in ["gru"]:
        train(x_train, y_train, vocabulary, x_dev, y_dev, cell_type)

if __name__ == "__main__":
    tf.app.run()
| 48.867925 | 122 | 0.597587 | import tensorflow as tf
import numpy as np
import os,sys
import datetime
import time
from rnn import RNN
import data_helpers
did = sys.argv[1]
f1 = open("log_{}".format(did),"w")
tf.flags.DEFINE_string("pos_dir", "data/rt-polaritydata/rt-polarity.pos", "Path of positive data")
tf.flags.DEFINE_string("neg_dir", "data/rt-polaritydata/rt-polarity.neg", "Path of negative data")
tf.flags.DEFINE_float("dev_sample_percentage", .1, "Percentage of the training data to use for validation")
tf.flags.DEFINE_string("cell_type", "lstm", "Type of rnn cell. Choose 'vanilla' or 'lstm' or 'gru' (Default: vanilla)")
tf.flags.DEFINE_string("word2vec", None, "Word2vec file with pre-trained embeddings")
tf.flags.DEFINE_integer("embedding_dim", 300, "Dimensionality of character embedding (Default: 300)")
tf.flags.DEFINE_integer("hidden_size", 64, "Dimensionality of character embedding (Default: 128)")
tf.flags.DEFINE_float("dropout_keep_prob", 0.5, "Dropout keep probability (Default: 0.5)")
tf.flags.DEFINE_float("l2_reg_lambda", 3.0, "L2 regularization lambda (Default: 3.0)")
tf.flags.DEFINE_integer("batch_size", 60, "Batch Size (Default: 64)")
tf.flags.DEFINE_integer("num_epochs", 30, "Number of training epochs (Default: 100)")
tf.flags.DEFINE_integer("display_every", 50, "Number of iterations to display training info.")
tf.flags.DEFINE_integer("evaluate_every", 50, "Evaluate model on dev set after this many steps")
tf.flags.DEFINE_integer("checkpoint_every", 1000, "Save model after this many steps")
tf.flags.DEFINE_integer("num_checkpoints", 11, "Number of checkpoints to store")
tf.flags.DEFINE_float("learning_rate", 1e-3, "Which learning rate to start with. (Default: 1e-3)")
tf.flags.DEFINE_boolean("allow_soft_placement", True, "Allow device soft device placement")
tf.flags.DEFINE_boolean("log_device_placement", False, "Log placement of ops on devices")
FLAGS = tf.flags.FLAGS
FLAGS._parse_flags()
print("\nParameters:")
f1.write("\nParameters:\n")
for attr, value in sorted(FLAGS.__flags.items()):
print("{} = {}".format(attr.upper(), value))
f1.write("{} = {}\n".format(attr.upper(), value))
print("")
def train(x_train, y_train, vocabulary, x_dev, y_dev,cell):
    """Build, train and checkpoint one RNN classifier with the given cell type.

    Uses module-level globals: FLAGS (hyperparameters), did (run id), f1 (log file),
    plus RNN / data_helpers / np / os / datetime imported at module level.
    """
    print("*************************************************"+cell+"****************************************************")
    with tf.Graph().as_default():
        # Cap GPU memory so several runs can share one device.
        gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.40)
        session_conf = tf.ConfigProto(
            allow_soft_placement=FLAGS.allow_soft_placement,
            log_device_placement=FLAGS.log_device_placement,
            gpu_options=gpu_options)
        sess = tf.Session(config=session_conf)
        with sess.as_default():
            # Model: sequence length / class count are taken from the training arrays.
            rnn = RNN(
                sequence_length=x_train.shape[1],
                num_classes=y_train.shape[1],
                vocab_size=len(vocabulary),
                embedding_size=FLAGS.embedding_dim,
                cell_type=cell,
                hidden_size=FLAGS.hidden_size,
                l2_reg_lambda=FLAGS.l2_reg_lambda
            )
            # Training op: plain Adam on the model loss, tracking a global step.
            global_step = tf.Variable(0, name="global_step", trainable=False)
            train_op = tf.train.AdamOptimizer(FLAGS.learning_rate).minimize(rnn.loss, global_step=global_step)
            # Summaries are created but never written to a FileWriter in this script.
            loss_summary = tf.summary.scalar("loss", rnn.loss)
            acc_summary = tf.summary.scalar("accuracy", rnn.accuracy)
            # Checkpoints: models/model_<cell>_<did>-<step>
            checkpoint_dir = "models/"
            checkpoint_prefix = os.path.join(checkpoint_dir, "model_{}_{}".format(cell,did))
            if not os.path.exists(checkpoint_dir):
                os.makedirs(checkpoint_dir)
            saver = tf.train.Saver(tf.global_variables(), max_to_keep=FLAGS.num_checkpoints)
            sess.run(tf.global_variables_initializer())
            # Optionally seed the embedding matrix from a binary word2vec file.
            if FLAGS.word2vec:
                # NOTE(review): text_vocab_processor is not defined anywhere in this file;
                # this branch raises NameError unless it is injected elsewhere — confirm.
                initW = np.random.uniform(-0.25, 0.25, (len(text_vocab_processor.vocabulary_), FLAGS.embedding_dim))
                print("Load word2vec file {0}".format(FLAGS.word2vec))
                with open(FLAGS.word2vec, "rb") as f:
                    # Binary word2vec format: "vocab_size dim\n" header, then
                    # "<word> <dim float32 values>" records.
                    header = f.readline()
                    vocab_size, layer1_size = map(int, header.split())
                    binary_len = np.dtype('float32').itemsize * layer1_size
                    for line in range(vocab_size):
                        word = []
                        while True:
                            ch = f.read(1).decode('latin-1')
                            if ch == ' ':
                                word = ''.join(word)
                                break
                            if ch != '\n':
                                word.append(ch)
                        idx = text_vocab_processor.vocabulary_.get(word)
                        if idx != 0:
                            # NOTE(review): np.fromstring is deprecated; np.frombuffer is the modern equivalent.
                            initW[idx] = np.fromstring(f.read(binary_len), dtype='float32')
                        else:
                            # Unknown word: skip its vector bytes to stay aligned.
                            f.read(binary_len)
                sess.run(rnn.W_text.assign(initW))
                print("Success to load pre-trained word2vec model!\n")
            # Shuffled mini-batches over the whole training set for num_epochs epochs.
            batches = data_helpers.batch_iter(
                list(zip(x_train, y_train)), FLAGS.batch_size, FLAGS.num_epochs)
            for batch in batches:
                x_batch, y_batch = zip(*batch)
                feed_dict = {
                    rnn.input_text: x_batch,
                    rnn.input_y: y_batch,
                    rnn.dropout_keep_prob: FLAGS.dropout_keep_prob
                }
                _, step, loss, accuracy = sess.run(
                    [train_op, global_step, rnn.loss, rnn.accuracy], feed_dict)
                # Periodic console/file progress report.
                if step % FLAGS.display_every == 0:
                    time_str = datetime.datetime.now().isoformat()
                    print("{}: step {}, loss {:g}, acc {:g}".format(time_str, step, loss, accuracy))
                    f1.write("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))
                # Periodic evaluation on the whole dev split (dropout disabled).
                if step % FLAGS.evaluate_every == 0:
                    f1.write("\nEvaluation:\n")
                    print("\nEvaluation:")
                    feed_dict_dev = {
                        rnn.input_text: x_dev,
                        rnn.input_y: y_dev,
                        rnn.dropout_keep_prob: 1.0
                    }
                    step, loss, accuracy = sess.run(
                        [global_step, rnn.loss, rnn.accuracy], feed_dict_dev)
                    time_str = datetime.datetime.now().isoformat()
                    print("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))
                    f1.write("{}: step {}, loss {:g}, acc {:g}\n".format(time_str, step, loss, accuracy))
                # Periodic checkpoint.
                if step % FLAGS.checkpoint_every == 0:
                    path = saver.save(sess, checkpoint_prefix, global_step=step)
                    print("Saved model checkpoint to {}\n".format(path))
                    f1.write("Saved model checkpoint to {}\n".format(path))
            # Final snapshot after the last batch.
            path = saver.save(sess, "models/model-final_{}_{}".format(cell,did))
            print("Saved model checkpoint to {}\n".format(path))
            f1.write("Saved model checkpoint to {}\n".format(path))
def main(_):
    """Entry point for tf.app.run(): load the data split, then train one model per cell type."""
    x_train, y_train, vocabulary, x_dev, y_dev = data_helpers.preprocess(did)
    for rnn_cell in ("gru",):
        train(x_train, y_train, vocabulary, x_dev, y_dev, rnn_cell)
if __name__ == "__main__":
    # tf.app.run() parses remaining flags and invokes main(_).
    tf.app.run()
| true | true |
1c382d2b8800ba69f78fa9e0859b7d4c992c4f1e | 4,842 | py | Python | earth_enterprise/src/server/wsgi/serve/constants.py | jsuberza/earthenterprise | e661a41f7cc218c60b269b0c9911df928cd9ce12 | [
"Apache-2.0"
] | 1 | 2020-06-17T12:20:52.000Z | 2020-06-17T12:20:52.000Z | earth_enterprise/src/server/wsgi/serve/constants.py | jsuberza/earthenterprise | e661a41f7cc218c60b269b0c9911df928cd9ce12 | [
"Apache-2.0"
] | null | null | null | earth_enterprise/src/server/wsgi/serve/constants.py | jsuberza/earthenterprise | e661a41f7cc218c60b269b0c9911df928cd9ce12 | [
"Apache-2.0"
] | 1 | 2020-12-16T09:26:10.000Z | 2020-12-16T09:26:10.000Z | #!/usr/bin/python
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Publisher constants: request commands and parameters, response header names.
"""
# Delimiter for multi-part parameters (array-parameters).
MULTI_PART_PARAMETER_DELIMITER = ","
# Built-in search service names.
POI_SEARCH_SERVICE_NAME = "POISearch"
DEFAULT_SEARCH_SERVICE_NAME = "GeocodingFederated"
HOST_NAME = "Host"
# Request commands.
CMD = "Cmd"
CMD_PING = "Ping"
CMD_RESET = "Reset"
CMD_QUERY = "Query"
CMD_ADD_DB = "AddDb"
CMD_DELETE_DB = "DeleteDb"
CMD_CLEANUP_DB = "CleanupDb"
CMD_PUBLISH_DB = "PublishDb"
CMD_REPUBLISH_DB = "RepublishDb"
CMD_SWAP_TARGETS = "SwapTargets"
CMD_UNPUBLISH_DB = "UnPublishDb"
CMD_SYNC_DB = "SyncDb"
CMD_ADD_VS = "AddVs"
CMD_DELETE_VS = "DeleteVs"
CMD_DISABLE_VS = "DisableVs"
CMD_ADD_SEARCH_DEF = "AddSearchDef"
CMD_DELETE_SEARCH_DEF = "DeleteSearchDef"
CMD_ADD_PLUGIN = "AddPlugin"
CMD_DELETE_PLUGIN = "DeletePlugin"
CMD_DECREMENT_COUNT = "DecrementCount"
CMD_LOCAL_TRANSFER = "LocalTransfer"
CMD_GARBAGE_COLLECT = "GarbageCollect"
CMD_CLEANUP = "Cleanup"
CMD_ADD_SNIPPET_SET = "AddSnippetSet"
CMD_DELETE_SNIPPET_SET = "DeleteSnippetSet"
# Request Params.
# Sub-commands of CMD_QUERY.
QUERY_CMD = "QueryCmd"
QUERY_CMD_LIST_DBS = "ListDbs"
QUERY_CMD_LIST_ASSETS = "ListAllAssets"
QUERY_CMD_DB_DETAILS = "DbDetails"
QUERY_CMD_LIST_VSS = "ListVss"
QUERY_CMD_VS_DETAILS = "VsDetails"
QUERY_CMD_LIST_TGS = "ListTgs"
QUERY_CMD_TARGET_DETAILS = "TargetDetails"
QUERY_CMD_LIST_SEARCH_DEFS = "ListSearchDefs"
QUERY_CMD_SEARCH_DEF_DETAILS = "SearchDefDetails"
QUERY_CMD_LIST_SNIPPET_SETS = "ListSnippetSets"
QUERY_CMD_SNIPPET_SET_DETAILS = "SnippetSetDetails"
QUERY_CMD_LIST_META_FIELD_PATHS = "ListMetaFieldPaths"
QUERY_CMD_META_FIELDS_SET = "MetaFieldsSet"
QUERY_CMD_PUBLISHED_DB_DETAILS = "PublishedDbDetails"
QUERY_CMD_PUBLISHED_DBS = "PublishedDbs"
QUERY_CMD_SERVER_PREFIX = "ServerPrefix"
QUERY_CMD_HOST_ROOT = "HostRoot"
QUERY_CMD_SERVER_HOST = "ServerHost"
QUERY_CMD_ALLOW_SYM_LINKS = "AllowSymLinks"
QUERY_CMD_LIST_PLUGIND = "ListPlugins"
QUERY_CMD_GEDB_PATH = "GeDbPath"
# Database, file and virtual-server parameter names.
DB_ID = "DbId"
DB_NAME = "DbName"
DB_PRETTY_NAME = "DbPrettyName"
DB_TYPE = "DbType"
DB_TIMESTAMP = "DbTimestamp"
DB_SIZE = "DbSize"
DB_USE_GOOGLE_BASEMAP = "DbUseGoogleBasemap"
FILE_PATH = "FilePath"
FILE_SIZE = "FileSize"
VS_NAME = "VsName"
VS_TYPE = "VsType"
VS_URL = "VsUrl"
VS_SSL = "VsSsl"
VS_CACHE_LEVEL = "VsCacheLevel"
PLUGIN_NAME = "PluginName"
CLASS_NAME = "ClassName"
SEARCH_URL = "SearchUrl"
SEARCH_VS_NAME = "SearchVsName"
DEST_FILE_PATH = "DestFilePath"
FORCE_COPY = "ForceCopy"
PREFER_COPY = "PreferCopy"
TARGET_PATH = "TargetPath"
TARGET_PATH_A = "TargetPathA"
TARGET_PATH_B = "TargetPathB"
VIRTUAL_HOST_NAME = "VirtualHostName"
# Search-definition and snippet-set parameter names.
SEARCH_DEF_NAME = "SearchDefName"
SUPPLEMENTAL_SEARCH_DEF_NAME = "SupSearchDefName"
SEARCH_DEF = "SearchDef"
POI_FEDERATED = "PoiFederated"
POI_SUGGESTION = "PoiSuggestion"
NEED_SEARCH_TAB_ID = "NeedSearchTabId"
SUPPLEMENTAL_UI_LABEL = "SupUiLabel"
SNIPPET_SET_NAME = "SnippetSetName"
SNIPPET_SET = "SnippetSet"
SERVE_WMS = "ServeWms"
EC_DEFAULT_DB = "EcDefaultDb"
ORIGIN_REQUEST_HOST = "OriginRequestHost"
# Response header names.
HDR_STATUS_CODE = "Gepublish-StatusCode"
HDR_STATUS_MESSAGE = "Gepublish-StatusMessage"
HDR_FILE_NAME = "Gepublish-FileName"
HDR_PLUGIN_DETAILS = "Gepublish-PluginDetails"
HDR_HOST_NAME = "Gepublish-HostName"
HDR_DB_NAME = "Gepublish-DbName"
HDR_DB_PRETTY_NAME = "Gepublish-DbPrettyName"
HDR_TARGET_PATH = "Gepublish-TargetPath"
HDR_VS_TYPE = "Gepublish-VsType"
HDR_VS_NAME = "Gepublish-VsName"
HDR_SERVER_PREFIX = "Gepublish-ServerPrefix"
HDR_SERVER_HOST = "Gepublish-ServerHost"
HDR_SERVER_HOST_FULL = "Gepublish-ServerHostFull"
HDR_ALLOW_SYM_LINKS = "Gepublish-AllowSymLinks"
HDR_VS_URL = "Gepublish-VsUrl"
HDR_DB_ID = "Gepublish-DbId"
HDR_HOST_ROOT = "Gepublish-HostRoot"
HDR_DELETE_COUNT = "Gepublish-DeleteCount"
HDR_DELETE_SIZE = "Gepublish-DeleteSize"
HDR_SEARCH_URL = "Gepublish-SearchUrl"
HDR_PLUGIN_NAME = "Gepublish-PluginName"
HDR_PLUGIN_CLASS_NAME = "Gepublish-PluginClassName"
HDR_DATA = "Gepublish-Data"
# Keys of the JSON response body.
HDR_JSON_RESULTS = "results"
HDR_JSON_STATUS_CODE = "status_code"
HDR_JSON_STATUS_MESSAGE = "status_message"
# TODO: Get from mod_fdb!?
CUTTER_GLOBES_PATH = "/opt/google/gehttpd/htdocs/cutter/globes"
# Response status codes.
STATUS_FAILURE = -1
STATUS_SUCCESS = 0
STATUS_UPLOAD_NEEDED = 1
| 31.855263 | 79 | 0.803594 |
MULTI_PART_PARAMETER_DELIMITER = ","
POI_SEARCH_SERVICE_NAME = "POISearch"
DEFAULT_SEARCH_SERVICE_NAME = "GeocodingFederated"
HOST_NAME = "Host"
CMD = "Cmd"
CMD_PING = "Ping"
CMD_RESET = "Reset"
CMD_QUERY = "Query"
CMD_ADD_DB = "AddDb"
CMD_DELETE_DB = "DeleteDb"
CMD_CLEANUP_DB = "CleanupDb"
CMD_PUBLISH_DB = "PublishDb"
CMD_REPUBLISH_DB = "RepublishDb"
CMD_SWAP_TARGETS = "SwapTargets"
CMD_UNPUBLISH_DB = "UnPublishDb"
CMD_SYNC_DB = "SyncDb"
CMD_ADD_VS = "AddVs"
CMD_DELETE_VS = "DeleteVs"
CMD_DISABLE_VS = "DisableVs"
CMD_ADD_SEARCH_DEF = "AddSearchDef"
CMD_DELETE_SEARCH_DEF = "DeleteSearchDef"
CMD_ADD_PLUGIN = "AddPlugin"
CMD_DELETE_PLUGIN = "DeletePlugin"
CMD_DECREMENT_COUNT = "DecrementCount"
CMD_LOCAL_TRANSFER = "LocalTransfer"
CMD_GARBAGE_COLLECT = "GarbageCollect"
CMD_CLEANUP = "Cleanup"
CMD_ADD_SNIPPET_SET = "AddSnippetSet"
CMD_DELETE_SNIPPET_SET = "DeleteSnippetSet"
QUERY_CMD = "QueryCmd"
QUERY_CMD_LIST_DBS = "ListDbs"
QUERY_CMD_LIST_ASSETS = "ListAllAssets"
QUERY_CMD_DB_DETAILS = "DbDetails"
QUERY_CMD_LIST_VSS = "ListVss"
QUERY_CMD_VS_DETAILS = "VsDetails"
QUERY_CMD_LIST_TGS = "ListTgs"
QUERY_CMD_TARGET_DETAILS = "TargetDetails"
QUERY_CMD_LIST_SEARCH_DEFS = "ListSearchDefs"
QUERY_CMD_SEARCH_DEF_DETAILS = "SearchDefDetails"
QUERY_CMD_LIST_SNIPPET_SETS = "ListSnippetSets"
QUERY_CMD_SNIPPET_SET_DETAILS = "SnippetSetDetails"
QUERY_CMD_LIST_META_FIELD_PATHS = "ListMetaFieldPaths"
QUERY_CMD_META_FIELDS_SET = "MetaFieldsSet"
QUERY_CMD_PUBLISHED_DB_DETAILS = "PublishedDbDetails"
QUERY_CMD_PUBLISHED_DBS = "PublishedDbs"
QUERY_CMD_SERVER_PREFIX = "ServerPrefix"
QUERY_CMD_HOST_ROOT = "HostRoot"
QUERY_CMD_SERVER_HOST = "ServerHost"
QUERY_CMD_ALLOW_SYM_LINKS = "AllowSymLinks"
QUERY_CMD_LIST_PLUGIND = "ListPlugins"
QUERY_CMD_GEDB_PATH = "GeDbPath"
DB_ID = "DbId"
DB_NAME = "DbName"
DB_PRETTY_NAME = "DbPrettyName"
DB_TYPE = "DbType"
DB_TIMESTAMP = "DbTimestamp"
DB_SIZE = "DbSize"
DB_USE_GOOGLE_BASEMAP = "DbUseGoogleBasemap"
FILE_PATH = "FilePath"
FILE_SIZE = "FileSize"
VS_NAME = "VsName"
VS_TYPE = "VsType"
VS_URL = "VsUrl"
VS_SSL = "VsSsl"
VS_CACHE_LEVEL = "VsCacheLevel"
PLUGIN_NAME = "PluginName"
CLASS_NAME = "ClassName"
SEARCH_URL = "SearchUrl"
SEARCH_VS_NAME = "SearchVsName"
DEST_FILE_PATH = "DestFilePath"
FORCE_COPY = "ForceCopy"
PREFER_COPY = "PreferCopy"
TARGET_PATH = "TargetPath"
TARGET_PATH_A = "TargetPathA"
TARGET_PATH_B = "TargetPathB"
VIRTUAL_HOST_NAME = "VirtualHostName"
SEARCH_DEF_NAME = "SearchDefName"
SUPPLEMENTAL_SEARCH_DEF_NAME = "SupSearchDefName"
SEARCH_DEF = "SearchDef"
POI_FEDERATED = "PoiFederated"
POI_SUGGESTION = "PoiSuggestion"
NEED_SEARCH_TAB_ID = "NeedSearchTabId"
SUPPLEMENTAL_UI_LABEL = "SupUiLabel"
SNIPPET_SET_NAME = "SnippetSetName"
SNIPPET_SET = "SnippetSet"
SERVE_WMS = "ServeWms"
EC_DEFAULT_DB = "EcDefaultDb"
ORIGIN_REQUEST_HOST = "OriginRequestHost"
HDR_STATUS_CODE = "Gepublish-StatusCode"
HDR_STATUS_MESSAGE = "Gepublish-StatusMessage"
HDR_FILE_NAME = "Gepublish-FileName"
HDR_PLUGIN_DETAILS = "Gepublish-PluginDetails"
HDR_HOST_NAME = "Gepublish-HostName"
HDR_DB_NAME = "Gepublish-DbName"
HDR_DB_PRETTY_NAME = "Gepublish-DbPrettyName"
HDR_TARGET_PATH = "Gepublish-TargetPath"
HDR_VS_TYPE = "Gepublish-VsType"
HDR_VS_NAME = "Gepublish-VsName"
HDR_SERVER_PREFIX = "Gepublish-ServerPrefix"
HDR_SERVER_HOST = "Gepublish-ServerHost"
HDR_SERVER_HOST_FULL = "Gepublish-ServerHostFull"
HDR_ALLOW_SYM_LINKS = "Gepublish-AllowSymLinks"
HDR_VS_URL = "Gepublish-VsUrl"
HDR_DB_ID = "Gepublish-DbId"
HDR_HOST_ROOT = "Gepublish-HostRoot"
HDR_DELETE_COUNT = "Gepublish-DeleteCount"
HDR_DELETE_SIZE = "Gepublish-DeleteSize"
HDR_SEARCH_URL = "Gepublish-SearchUrl"
HDR_PLUGIN_NAME = "Gepublish-PluginName"
HDR_PLUGIN_CLASS_NAME = "Gepublish-PluginClassName"
HDR_DATA = "Gepublish-Data"
HDR_JSON_RESULTS = "results"
HDR_JSON_STATUS_CODE = "status_code"
HDR_JSON_STATUS_MESSAGE = "status_message"
CUTTER_GLOBES_PATH = "/opt/google/gehttpd/htdocs/cutter/globes"
STATUS_FAILURE = -1
STATUS_SUCCESS = 0
STATUS_UPLOAD_NEEDED = 1
| true | true |
1c382f14053fc6ec2c7e73bebc1a8bdd0037777f | 333 | py | Python | scripts/5.2.plot_stability.py | nmningmei/agent_models | 8380f1203e6d5a18f18f9adeb6bd36b23b2ae61b | [
"MIT"
] | null | null | null | scripts/5.2.plot_stability.py | nmningmei/agent_models | 8380f1203e6d5a18f18f9adeb6bd36b23b2ae61b | [
"MIT"
] | null | null | null | scripts/5.2.plot_stability.py | nmningmei/agent_models | 8380f1203e6d5a18f18f9adeb6bd36b23b2ae61b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 29 08:37:19 2020
@author: nmei
"""
import os
import gc
from glob import glob
from tqdm import tqdm
import numpy as np
import pandas as pd
import seaborn as sns
# Directory holding per-experiment stability results.
working_dir = '../stability'
# Collect every saved stability array: ../stability/<experiment>/stability*.npy
working_data = glob(os.path.join(working_dir,'*','stability*.npy'))
| 15.136364 | 67 | 0.6997 |
import os
import gc
from glob import glob
from tqdm import tqdm
import numpy as np
import pandas as pd
import seaborn as sns
working_dir = '../stability'
working_data = glob(os.path.join(working_dir,'*','stability*.npy'))
| true | true |
1c382f9eb7b88a9524ab2bb8a7334e5fa289e7f2 | 3,001 | py | Python | vim/choices/letter.py | GoNZooo/dragonfly-grammars | 22d639d8f86f4f5a7c44caa73e75c4938c0ce199 | [
"MIT"
] | 3 | 2020-09-06T10:40:19.000Z | 2020-09-29T20:39:52.000Z | vim/choices/letter.py | GoNZooo/dragonfly-grammars | 22d639d8f86f4f5a7c44caa73e75c4938c0ce199 | [
"MIT"
] | null | null | null | vim/choices/letter.py | GoNZooo/dragonfly-grammars | 22d639d8f86f4f5a7c44caa73e75c4938c0ce199 | [
"MIT"
] | null | null | null | from dragonfly import (Choice)
def letter_choice(name="letter"):
    """Build a dragonfly Choice mapping spoken words to keystroke names.

    Covers lowercase letters, shifted ("ship ...") uppercase letters, digits,
    punctuation/symbols, a few composite comparison operators, and editing keys.
    `name` is the extras-slot name the Choice binds to in a rule spec.
    """
    return Choice(name, {
        # lowercase alphabet
        'air': 'a',
        'bat': 'b',
        'cap': 'c',
        'drum': 'd',
        'each': 'e',
        'fine': 'f',
        'gust': 'g',
        'harp': 'h',
        'sit': 'i',
        'jim': 'j',
        'kick': 'k',
        'look': 'l',
        'made': 'm',
        'near': 'n',
        'on': 'o',
        'pig': 'p',
        'quench': 'q',
        'red': 'r',
        'sun': 's',
        'trap': 't',
        'urge': 'u',
        'vest': 'v',
        'whale': 'w',
        'plex': 'x',
        'yep': 'y',
        'zip': 'z',
        # "ship" prefix selects the uppercase (shifted) letter
        'ship air': 'A',
        'ship bat': 'B',
        'ship cap': 'C',
        'ship drum': 'D',
        'ship each': 'E',
        'ship fine': 'F',
        'ship gust': 'G',
        'ship harp': 'H',
        'ship sit': 'I',
        'ship jim': 'J',
        'ship kick': 'K',
        'ship look': 'L',
        'ship made': 'M',
        'ship near': 'N',
        'ship on': 'O',
        'ship pig': 'P',
        'ship quench': 'Q',
        'ship red': 'R',
        'ship sun': 'S',
        'ship trap': 'T',
        'ship urge': 'U',
        'ship vest': 'V',
        'ship whale': 'W',
        'ship plex': 'X',
        'ship yep': 'Y',
        'ship zip': 'Z',
        # digits
        'zero': '0',
        'one': '1',
        'two': '2',
        'three': '3',
        'four': '4',
        'five': '5',
        'six': '6',
        'seven': '7',
        'eight': '8',
        'nine': '9',
        # whitespace and misc keys
        'space': 'space',
        'tabby': 'tab',
        'backtick': 'backtick',
        'pipe': 'bar',
        # vim functionality symbols
        'colon': 'colon',
        'slash': 'slash',
        'hat': 'caret',
        'dollar': 'dollar',
        # more or less important symbols for code
        'spamma': 'comma',
        'assign': 'equal',
        'bang': 'exclamation',
        # quotes
        '(double|dub)': 'dquote',
        '(single|sing)': 'squote',
        'score': 'underscore',
        'quest': 'question',
        'ampersand': 'ampersand',
        '(apostrophe | post)': 'apostrophe',
        'starling': 'asterisk',
        'at sign': 'at',
        'backslash': 'backslash',
        'prick': 'dot',
        'hashtag': 'hash',
        'hyphen': 'hyphen',
        'minus': 'minus',
        'purse': 'percent',
        'plus': 'plus',
        'semi': 'semicolon',
        'tilde': 'tilde',
        # parens
        'lice': 'lparen',
        'rice': 'rparen',
        # angle brackets
        'langle': 'langle',
        'rangle': 'rangle',
        # curly braces
        'lace': 'lbrace',
        'race': 'rbrace',
        # square brackets
        'lack': 'lbracket',
        'rack': 'rbracket',
        # simple composite comparison operators (multi-key sequences)
        'seek': 'slash,equal',
        'eek': 'equal,equal',
        'beak': 'exclamation,equal',
        # editing
        'backspace': 'backspace',
        '(escape|okay)': 'escape',
        'slap': 'enter'
    })
| 22.22963 | 49 | 0.377541 | from dragonfly import (Choice)
def letter_choice(name="letter"):
return Choice(name, {
'air': 'a',
'bat': 'b',
'cap': 'c',
'drum': 'd',
'each': 'e',
'fine': 'f',
'gust': 'g',
'harp': 'h',
'sit': 'i',
'jim': 'j',
'kick': 'k',
'look': 'l',
'made': 'm',
'near': 'n',
'on': 'o',
'pig': 'p',
'quench': 'q',
'red': 'r',
'sun': 's',
'trap': 't',
'urge': 'u',
'vest': 'v',
'whale': 'w',
'plex': 'x',
'yep': 'y',
'zip': 'z',
'ship air': 'A',
'ship bat': 'B',
'ship cap': 'C',
'ship drum': 'D',
'ship each': 'E',
'ship fine': 'F',
'ship gust': 'G',
'ship harp': 'H',
'ship sit': 'I',
'ship jim': 'J',
'ship kick': 'K',
'ship look': 'L',
'ship made': 'M',
'ship near': 'N',
'ship on': 'O',
'ship pig': 'P',
'ship quench': 'Q',
'ship red': 'R',
'ship sun': 'S',
'ship trap': 'T',
'ship urge': 'U',
'ship vest': 'V',
'ship whale': 'W',
'ship plex': 'X',
'ship yep': 'Y',
'ship zip': 'Z',
'zero': '0',
'one': '1',
'two': '2',
'three': '3',
'four': '4',
'five': '5',
'six': '6',
'seven': '7',
'eight': '8',
'nine': '9',
'space': 'space',
'tabby': 'tab',
'backtick': 'backtick',
'pipe': 'bar',
'colon': 'colon',
'slash': 'slash',
'hat': 'caret',
'dollar': 'dollar',
'spamma': 'comma',
'assign': 'equal',
'bang': 'exclamation',
'(double|dub)': 'dquote',
'(single|sing)': 'squote',
'score': 'underscore',
'quest': 'question',
'ampersand': 'ampersand',
'(apostrophe | post)': 'apostrophe',
'starling': 'asterisk',
'at sign': 'at',
'backslash': 'backslash',
'prick': 'dot',
'hashtag': 'hash',
'hyphen': 'hyphen',
'minus': 'minus',
'purse': 'percent',
'plus': 'plus',
'semi': 'semicolon',
'tilde': 'tilde',
'lice': 'lparen',
'rice': 'rparen',
'langle': 'langle',
'rangle': 'rangle',
'lace': 'lbrace',
'race': 'rbrace',
'lack': 'lbracket',
'rack': 'rbracket',
'seek': 'slash,equal',
'eek': 'equal,equal',
'beak': 'exclamation,equal',
'backspace': 'backspace',
'(escape|okay)': 'escape',
'slap': 'enter'
})
| true | true |
1c38306c2d00deac55934d2428e42f226f59dead | 1,096 | py | Python | app/app/urls.py | gbr-mendes/ead-courses-api | fd0fcc8aef996c4b671f34d51a8a74aafdf78480 | [
"MIT"
] | 1 | 2022-01-31T18:12:42.000Z | 2022-01-31T18:12:42.000Z | app/app/urls.py | gbr-mendes/ead-courses-api | fd0fcc8aef996c4b671f34d51a8a74aafdf78480 | [
"MIT"
] | null | null | null | app/app/urls.py | gbr-mendes/ead-courses-api | fd0fcc8aef996c4b671f34d51a8a74aafdf78480 | [
"MIT"
] | null | null | null | """app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from university import views
urlpatterns = [
    # Built-in Django admin site.
    path('admin/', admin.site.urls),
    # Third party: DRF browsable-API login/logout views.
    path('api-auth/', include('rest_framework.urls')),
    # API URLs.
    path('', views.ExibitionView.as_view(), name='home_message'),  # presentation page for the project
    path('api/accounts/', include('accounts.urls')),
    path('api/university/', include('university.urls')),
]
| 36.533333 | 90 | 0.69708 | from django.contrib import admin
from django.urls import path, include
from university import views
urlpatterns = [
path('admin/', admin.site.urls),
path('api-auth/', include('rest_framework.urls')),
path('', views.ExibitionView.as_view(), name='home_message'),# apresatation of project
path('api/accounts/', include('accounts.urls')),
path('api/university/', include('university.urls')),
]
| true | true |
1c3830931ecef6f54d56e54465037878caf81142 | 8,102 | py | Python | spatial_reasoning/models/uvfa_text.py | MarcCote/spatial-reasoning | 06c57cfafbd1c24b68d6ab634d19806964d867f3 | [
"MIT"
] | null | null | null | spatial_reasoning/models/uvfa_text.py | MarcCote/spatial-reasoning | 06c57cfafbd1c24b68d6ab634d19806964d867f3 | [
"MIT"
] | null | null | null | spatial_reasoning/models/uvfa_text.py | MarcCote/spatial-reasoning | 06c57cfafbd1c24b68d6ab634d19806964d867f3 | [
"MIT"
] | null | null | null | ## predicts entire value map
## rather than a single value
import torch
import math, torch.nn as nn, pdb
import torch.nn.functional as F
from torch.autograd import Variable
import spatial_reasoning.models as models
import spatial_reasoning.utils as utils
class UVFA_text(nn.Module):
    """UVFA-style value-map predictor conditioned on a text instruction.

    Predicts an entire map_dim x map_dim value map (one value per candidate
    agent position) instead of a single scalar.  Each cell's value is a
    rank-`args.rank` factored product of three embeddings:
    state MLP output * object MLP output * instruction LSTM output,
    summed over the rank dimension.
    """

    def __init__(self, lstm, state_vocab, object_vocab, args, map_dim = 10, batch_size = 32):
        super(UVFA_text, self).__init__()
        self.state_vocab = state_vocab
        self.object_vocab = object_vocab
        self.lstm = lstm
        self.rank = args.rank
        self.map_dim = map_dim
        self.batch_size = batch_size
        # Cached one-hot agent positions; rebuilt in forward() if batch size changes.
        self.positions = self.__agent_pos()
        # +1 channel for the appended agent-position indicator.
        self.state_dim = (self.state_vocab + 1) * (map_dim ** 2)
        self.state_layers = [self.state_dim, 128, 128, args.rank]
        self.state_mlp = models.MLP(self.state_layers)
        self.object_dim = self.object_vocab * (map_dim ** 2)
        self.object_layers = [self.object_dim, 128, 128, args.rank]
        self.object_mlp = models.MLP(self.object_layers)

    def __agent_pos(self):
        """Return a < batch*map_dim^2 x 100 x 1 > one-hot tensor marking each candidate agent cell.

        Row b*size + ind (for batch element b) has a 1 at map cell `ind`, matching
        the row order produced by flattening a (batch, size, ...) tensor.
        """
        size = self.map_dim ** 2
        positions = torch.zeros(self.batch_size * size, 100, 1)
        for ind in range(size):
            positions[ind:self.batch_size * size:size, ind] = 1
        return Variable(positions.cuda())

    def __repeat_position(self, x):
        """Tile x along a new dim 1: one copy per candidate agent cell (map_dim^2 copies)."""
        # Bug fix: the original tested `x.size() == 2`, comparing a torch.Size to
        # an int (always False), so 2-D inputs (the instruction batch) fell into
        # the 4-D repeat and were tiled along the batch axis, misaligning
        # instructions with map positions after the subsequent view().
        if len(x.size()) == 2:
            return x.unsqueeze(1).repeat(1, self.map_dim ** 2, 1)
        else:
            return x.unsqueeze(1).repeat(1, self.map_dim ** 2, 1, 1)

    def __construct_inp(self, state, obj, text):
        """Expand per-map inputs to per-position batched vectors.

        state: < batch x 100 x state_vocab >  -> < batch*100 x state_dim >
        obj:   < batch x 100 x object_vocab > -> < batch*100 x object_dim >
        text:  < batch x length >             -> < length x batch*100 > (time-major for the LSTM)
        """
        state = self.__repeat_position(state)
        state = state.view(self.batch_size * self.map_dim ** 2, self.map_dim ** 2, self.state_vocab)
        # Append the one-hot agent position as an extra channel, then flatten.
        state = torch.cat((state, self.positions), -1)
        state = state.view(-1, self.state_dim)
        obj = self.__repeat_position(obj)
        obj = obj.view(self.batch_size * self.map_dim ** 2, self.map_dim ** 2, self.object_vocab)
        obj = obj.view(-1, self.object_dim)
        instr_length = text.size(1)
        text = self.__repeat_position(text)
        text = text.view(self.batch_size * self.map_dim ** 2, instr_length)
        text = text.transpose(0, 1)
        return state, obj, text

    def forward(self, inp):
        """Return a < batch x map_dim x map_dim > predicted value map.

        inp is a (state, obj, text) triple of index tensors:
        state/obj are < batch x map_dim^2 > cell indices, text is < batch x length >.
        """
        (state, obj, text) = inp
        batch_size = state.size(0)
        hidden = self.lstm.init_hidden(batch_size * self.map_dim ** 2)
        if batch_size != self.batch_size:
            # Rebuild the cached position tensor for the new batch size.
            self.batch_size = batch_size
            self.positions = self.__agent_pos()
        # scatter_ works on tensors, not Variables (legacy PyTorch), so unwrap.
        state = state.data.view(-1, self.map_dim ** 2, 1)
        obj = obj.data.view(-1, self.map_dim ** 2, 1)
        # One-hot encode the state / object cell indices.
        state_binary = torch.zeros(batch_size, self.map_dim ** 2, self.state_vocab).cuda()
        object_binary = torch.zeros(batch_size, self.map_dim ** 2, self.object_vocab).cuda()
        state_binary.scatter_(2, state, 1)
        object_binary.scatter_(2, obj, 1)
        state_binary = Variable(state_binary)
        object_binary = Variable(object_binary)
        state_binary, object_binary, text = self.__construct_inp(state_binary, object_binary, text)
        # < batch*100 x rank > embeddings for each factor.
        state_out = self.state_mlp(state_binary)
        object_out = self.object_mlp(object_binary)
        lstm_out = self.lstm.forward(text, hidden)
        # Rank-wise product summed over rank -> one scalar per candidate position.
        values = state_out * object_out * lstm_out
        map_pred = values.sum(1, keepdim=True).view(self.batch_size, self.map_dim, self.map_dim)
        return map_pred
| 36.169643 | 111 | 0.620464 | torch.nn.functional as F
from torch.autograd import Variable
import spatial_reasoning.models as models
import spatial_reasoning.utils as utils
class UVFA_text(nn.Module):
def __init__(self, lstm, state_vocab, object_vocab, args, map_dim = 10, batch_size = 32):
super(UVFA_text, self).__init__()
self.state_vocab = state_vocab
self.object_vocab = object_vocab
self.lstm = lstm
self.rank = args.rank
self.map_dim = map_dim
self.batch_size = batch_size
self.positions = self.__agent_pos()
elf.state_vocab+1) * (map_dim**2)
self.state_layers = [self.state_dim, 128, 128, args.rank]
self.state_mlp = models.MLP(self.state_layers)
self.object_dim = self.object_vocab * (map_dim**2)
self.object_layers = [self.object_dim, 128, 128, args.rank]
self.object_mlp = models.MLP(self.object_layers)
def __agent_pos(self):
size = self.map_dim**2
positions = torch.zeros(self.batch_size*size, 100, 1)
for ind in range(size):
positions[ind:self.batch_size*size:size, ind] = 1
return Variable( positions.cuda() )
def __repeat_position(self, x):
if x.size() == 2:
return x.unsqueeze(1).repeat(1,self.map_dim**2,1)
else:
return x.unsqueeze(1).repeat(1,self.map_dim**2,1,1)
def __construct_inp(self, state, obj, text):
state = self.__repeat_position(state)
state = state.view(self.batch_size*self.map_dim**2,self.map_dim**2,self.state_vocab)
ch.cat( (state, self.positions), -1)
obj = self.__repeat_position(obj)
obj = obj.view(self.batch_size*self.map_dim**2,self.map_dim**2,self.object_vocab)
obj = obj.view(-1, self.object_dim)
instr_length = text.size(1)
n(text)
w(self.batch_size*self.map_dim**2,instr_length)
nspose(0,1)
, obj, text
def forward(self, inp):
(state, obj, text) = inp
batch_size = state.size(0)
hidden = self.lstm.init_hidden(batch_size * self.map_dim**2)
if batch_size != self.batch_size:
self.batch_size = batch_size
self.positions = self.__agent_pos()
dim**2, 1)
obj = obj.data.view(-1, self.map_dim**2, 1)
## make state / object indices into one-hot vectors
state_binary = torch.zeros(batch_size, self.map_dim**2, self.state_vocab).cuda()
object_binary = torch.zeros(batch_size, self.map_dim**2, self.object_vocab).cuda()
state_binary.scatter_(2, state, 1)
object_binary.scatter_(2, obj, 1)
state_binary = Variable( state_binary )
object_binary = Variable( object_binary )
state_binary, object_binary, text = self.__construct_inp(state_binary, object_binary, text)
# print(state_binary.size(), object_binary.size(), text.size())
# object_binary = self.__repeat_position(object_binary)
# object_binary = object_binary.view(self.batch_size*self.map_dim**2,self.map_dim**2,self.object_vocab)
## add in agent position
## < batch x 100 x 2 >
## < batch x 100 x 100 x 2 >
# state_binary = self.__repeat_position(state_binary)
## < batch*100 x 100 x 2 >
# state_binary = state_binary.view(self.batch_size*self.map_dim**2,self.map_dim**2,self.state_vocab)
## add agent position
# state_binary = torch.cat( (state_binary, self.positions), -1)
# pdb.set_trace()
# print(state_binary.size(), object_binary.size())
## reshape to (batched) vectors for input to MLPs
## turn back into Variables for backprop
# state_binary = state_binary.view(-1, self.state_dim)
# object_binary = object_binary.view(-1, self.object_dim)
## < batch*100 x rank >
state_out = self.state_mlp(state_binary)
object_out = self.object_mlp(object_binary)
lstm_out = self.lstm.forward(text, hidden)
# print(lstm_out.size())
values = state_out * object_out * lstm_out
map_pred = values.sum(1, keepdim=True).view(self.batch_size, self.map_dim, self.map_dim)
return map_pred
# def forward(self, inp):
# (state, obj, text) = inp
# batch_size = state.size(0)
# text = text.transpose(0,1)
# hidden = self.lstm.init_hidden(batch_size)
# if batch_size != self.batch_size:
# self.batch_size = batch_size
# # self.positions = self.__agent_pos()
# ## reshape to (batched) vectors
# ## can't scatter Variables
| true | true |
1c383171fd75508cefec057633d20ca9a2f06f7e | 8,220 | py | Python | bzt/modules/java/tools.py | YajanaRao/taurus | c08e5b90063bf4f7904e8ec4eb5f3c50a8e89ac0 | [
"Apache-2.0"
] | null | null | null | bzt/modules/java/tools.py | YajanaRao/taurus | c08e5b90063bf4f7904e8ec4eb5f3c50a8e89ac0 | [
"Apache-2.0"
] | null | null | null | bzt/modules/java/tools.py | YajanaRao/taurus | c08e5b90063bf4f7904e8ec4eb5f3c50a8e89ac0 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2018 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import subprocess
from bzt import ToolError
from bzt.utils import shell_exec, sync_run, RequiredTool, parse_java_version
class JarTool(RequiredTool):
VERSION = ""
URL = "{remote_addr}{remote_path}"
REMOTE_ADDR = "http://search.maven.org/remotecontent?filepath="
REMOTE_PATH = ""
LOCAL_PATH = "~/.bzt/selenium-taurus/{tool_file}"
def __init__(self, tool_name, local_path, tool_file):
tool_file = tool_file.format(version=self.VERSION)
remote_path = self.REMOTE_PATH.format(version=self.VERSION)
if not local_path:
local_path = self.LOCAL_PATH
local_path = local_path.format(tool_file=tool_file)
download_link = self.URL.format(remote_addr=self.REMOTE_ADDR, remote_path=remote_path)
super(JarTool, self).__init__(tool_name=tool_name, tool_path=local_path, download_link=download_link)
class JavaC(RequiredTool):
def __init__(self, tool_path="javac"):
super(JavaC, self).__init__("JavaC", tool_path)
def _get_version(self, output):
versions = re.findall("javac\ ([\d\._]*)", output)
version = parse_java_version(versions)
if not version:
self.log.warning("Tool version parsing error: %s", output)
return version
def check_if_installed(self):
cmd = [self.tool_path, '-version']
self.log.debug("Trying %s: %s", self.tool_name, cmd)
try:
output = sync_run(cmd)
self.log.debug("%s output: %s", self.tool_name, output)
self.version = self._get_version(output)
return True
except (subprocess.CalledProcessError, OSError) as exc:
self.log.debug("Failed to check %s: %s", self.tool_name, exc)
return False
def install(self):
raise ToolError("The %s is not operable or not available. Consider installing it" % self.tool_name)
class SeleniumServer(JarTool):
VERSION = "3.6"
REMOTE_ADDR = "http://selenium-release.storage.googleapis.com/"
REMOTE_PATH = "{version}/selenium-server-standalone-{version}.0.jar"
def __init__(self, local_path=""):
tool_file = "selenium-server-{version}.jar"
super(SeleniumServer, self).__init__("Selenium server", local_path, tool_file)
def check_if_installed(self):
self.log.debug("%s path: %s", self.tool_name, self.tool_path)
selenium_launch_command = ["java", "-jar", self.tool_path, "-help"]
selenium_subproc = shell_exec(selenium_launch_command, stderr=subprocess.STDOUT)
output = selenium_subproc.communicate()
self.log.debug("%s output: %s", self.tool_name, output)
if selenium_subproc.returncode == 0:
self.already_installed = True
return True
else:
return False
class Json(JarTool):
REMOTE_PATH = "org/json/json/20160810/json-20160810.jar"
def __init__(self, tool_path=""):
tool_file = "json.jar"
super(Json, self).__init__("Json", tool_path, tool_file)
class TestNG(JarTool):
VERSION = "6.8.5"
REMOTE_PATH = "org/testng/testng/{version}/testng-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "testng-{version}.jar"
super(TestNG, self).__init__("TestNG", tool_path, tool_file)
class Hamcrest(JarTool):
VERSION = "1.3"
REMOTE_PATH = "org/hamcrest/hamcrest-core/{version}/hamcrest-core-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "hamcrest-core-{version}.jar"
super(Hamcrest, self).__init__("HamcrestJar", tool_path, tool_file)
class JUnitJupiterApi(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/jupiter/junit-jupiter-api/{version}/junit-jupiter-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-jupiter-api-{version}.jar"
super(JUnitJupiterApi, self).__init__("JUnitJupiterApi", tool_path, tool_file)
class JUnitJupiterEngine(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/jupiter/junit-jupiter-engine/{version}/junit-jupiter-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-jupiter-engine-{version}.jar"
super(JUnitJupiterEngine, self).__init__("JUnitJupiterEngine", tool_path, tool_file)
class JUnitVintageEngine(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/vintage/junit-vintage-engine/{version}/junit-vintage-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-vintage-engine-{version}.jar"
super(JUnitVintageEngine, self).__init__("JUnitVintageEngine", tool_path, tool_file)
class JUnitPlatformCommons(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-commons/{version}/junit-platform-commons-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-commons-{version}.jar"
super(JUnitPlatformCommons, self).__init__("JUnitPlatformCommons", tool_path, tool_file)
class JUnitPlatformEngine(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-engine/{version}/junit-platform-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-engine-{version}.jar"
super(JUnitPlatformEngine, self).__init__("JUnitPlatformEngine", tool_path, tool_file)
class JUnitPlatformLauncher(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-launcher/{version}/junit-platform-launcher-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-launcher-{version}.jar"
super(JUnitPlatformLauncher, self).__init__("JUnitPlatformLauncher", tool_path, tool_file)
class JUnitPlatformRunner(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-runner/{version}/junit-platform-runner-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-runner-{version}.jar"
super(JUnitPlatformRunner, self).__init__("JUnitPlatformRunner", tool_path, tool_file)
class JUnitPlatformSuiteApi(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-suite-api/{version}/junit-platform-suite-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-suite-api-{version}.jar"
super(JUnitPlatformSuiteApi, self).__init__("JUnitPlatformSuiteApi", tool_path, tool_file)
class ApiGuardian(JarTool):
VERSION = "1.0.0"
REMOTE_PATH = "org/apiguardian/apiguardian-api/{version}/apiguardian-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "apiguardian-api-{version}.jar"
super(ApiGuardian, self).__init__("ApiGuardian", tool_path, tool_file)
class OpenTest4j(JarTool):
VERSION = "1.1.0"
REMOTE_PATH = "org/opentest4j/opentest4j/{version}/opentest4j-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "opentest4j-{version}.jar"
super(OpenTest4j, self).__init__("OpenTest4j", tool_path, tool_file)
class JUnit(JarTool):
VERSION = "4.12"
REMOTE_PATH = "junit/junit/{version}/junit-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-{version}.jar"
super(JUnit, self).__init__("JUnit", tool_path, tool_file)
class TaurusJavaHelper(JarTool):
VERSION = "1.4"
REMOTE_PATH = "com/blazemeter/taurus-java-helpers/{version}/taurus-java-helpers-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "taurus-java-helpers-{version}.jar"
super(TaurusJavaHelper, self).__init__("TaurusJavaHelper", tool_path, tool_file)
| 36.533333 | 112 | 0.693309 | import re
import subprocess
from bzt import ToolError
from bzt.utils import shell_exec, sync_run, RequiredTool, parse_java_version
class JarTool(RequiredTool):
VERSION = ""
URL = "{remote_addr}{remote_path}"
REMOTE_ADDR = "http://search.maven.org/remotecontent?filepath="
REMOTE_PATH = ""
LOCAL_PATH = "~/.bzt/selenium-taurus/{tool_file}"
def __init__(self, tool_name, local_path, tool_file):
tool_file = tool_file.format(version=self.VERSION)
remote_path = self.REMOTE_PATH.format(version=self.VERSION)
if not local_path:
local_path = self.LOCAL_PATH
local_path = local_path.format(tool_file=tool_file)
download_link = self.URL.format(remote_addr=self.REMOTE_ADDR, remote_path=remote_path)
super(JarTool, self).__init__(tool_name=tool_name, tool_path=local_path, download_link=download_link)
class JavaC(RequiredTool):
def __init__(self, tool_path="javac"):
super(JavaC, self).__init__("JavaC", tool_path)
def _get_version(self, output):
versions = re.findall("javac\ ([\d\._]*)", output)
version = parse_java_version(versions)
if not version:
self.log.warning("Tool version parsing error: %s", output)
return version
def check_if_installed(self):
cmd = [self.tool_path, '-version']
self.log.debug("Trying %s: %s", self.tool_name, cmd)
try:
output = sync_run(cmd)
self.log.debug("%s output: %s", self.tool_name, output)
self.version = self._get_version(output)
return True
except (subprocess.CalledProcessError, OSError) as exc:
self.log.debug("Failed to check %s: %s", self.tool_name, exc)
return False
def install(self):
raise ToolError("The %s is not operable or not available. Consider installing it" % self.tool_name)
class SeleniumServer(JarTool):
VERSION = "3.6"
REMOTE_ADDR = "http://selenium-release.storage.googleapis.com/"
REMOTE_PATH = "{version}/selenium-server-standalone-{version}.0.jar"
def __init__(self, local_path=""):
tool_file = "selenium-server-{version}.jar"
super(SeleniumServer, self).__init__("Selenium server", local_path, tool_file)
def check_if_installed(self):
self.log.debug("%s path: %s", self.tool_name, self.tool_path)
selenium_launch_command = ["java", "-jar", self.tool_path, "-help"]
selenium_subproc = shell_exec(selenium_launch_command, stderr=subprocess.STDOUT)
output = selenium_subproc.communicate()
self.log.debug("%s output: %s", self.tool_name, output)
if selenium_subproc.returncode == 0:
self.already_installed = True
return True
else:
return False
class Json(JarTool):
REMOTE_PATH = "org/json/json/20160810/json-20160810.jar"
def __init__(self, tool_path=""):
tool_file = "json.jar"
super(Json, self).__init__("Json", tool_path, tool_file)
class TestNG(JarTool):
VERSION = "6.8.5"
REMOTE_PATH = "org/testng/testng/{version}/testng-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "testng-{version}.jar"
super(TestNG, self).__init__("TestNG", tool_path, tool_file)
class Hamcrest(JarTool):
VERSION = "1.3"
REMOTE_PATH = "org/hamcrest/hamcrest-core/{version}/hamcrest-core-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "hamcrest-core-{version}.jar"
super(Hamcrest, self).__init__("HamcrestJar", tool_path, tool_file)
class JUnitJupiterApi(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/jupiter/junit-jupiter-api/{version}/junit-jupiter-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-jupiter-api-{version}.jar"
super(JUnitJupiterApi, self).__init__("JUnitJupiterApi", tool_path, tool_file)
class JUnitJupiterEngine(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/jupiter/junit-jupiter-engine/{version}/junit-jupiter-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-jupiter-engine-{version}.jar"
super(JUnitJupiterEngine, self).__init__("JUnitJupiterEngine", tool_path, tool_file)
class JUnitVintageEngine(JarTool):
VERSION = "5.2.0"
REMOTE_PATH = "org/junit/vintage/junit-vintage-engine/{version}/junit-vintage-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-vintage-engine-{version}.jar"
super(JUnitVintageEngine, self).__init__("JUnitVintageEngine", tool_path, tool_file)
class JUnitPlatformCommons(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-commons/{version}/junit-platform-commons-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-commons-{version}.jar"
super(JUnitPlatformCommons, self).__init__("JUnitPlatformCommons", tool_path, tool_file)
class JUnitPlatformEngine(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-engine/{version}/junit-platform-engine-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-engine-{version}.jar"
super(JUnitPlatformEngine, self).__init__("JUnitPlatformEngine", tool_path, tool_file)
class JUnitPlatformLauncher(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-launcher/{version}/junit-platform-launcher-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-launcher-{version}.jar"
super(JUnitPlatformLauncher, self).__init__("JUnitPlatformLauncher", tool_path, tool_file)
class JUnitPlatformRunner(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-runner/{version}/junit-platform-runner-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-runner-{version}.jar"
super(JUnitPlatformRunner, self).__init__("JUnitPlatformRunner", tool_path, tool_file)
class JUnitPlatformSuiteApi(JarTool):
VERSION = "1.2.0"
REMOTE_PATH = "org/junit/platform/junit-platform-suite-api/{version}/junit-platform-suite-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-platform-suite-api-{version}.jar"
super(JUnitPlatformSuiteApi, self).__init__("JUnitPlatformSuiteApi", tool_path, tool_file)
class ApiGuardian(JarTool):
VERSION = "1.0.0"
REMOTE_PATH = "org/apiguardian/apiguardian-api/{version}/apiguardian-api-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "apiguardian-api-{version}.jar"
super(ApiGuardian, self).__init__("ApiGuardian", tool_path, tool_file)
class OpenTest4j(JarTool):
VERSION = "1.1.0"
REMOTE_PATH = "org/opentest4j/opentest4j/{version}/opentest4j-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "opentest4j-{version}.jar"
super(OpenTest4j, self).__init__("OpenTest4j", tool_path, tool_file)
class JUnit(JarTool):
VERSION = "4.12"
REMOTE_PATH = "junit/junit/{version}/junit-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "junit-{version}.jar"
super(JUnit, self).__init__("JUnit", tool_path, tool_file)
class TaurusJavaHelper(JarTool):
VERSION = "1.4"
REMOTE_PATH = "com/blazemeter/taurus-java-helpers/{version}/taurus-java-helpers-{version}.jar"
def __init__(self, tool_path=""):
tool_file = "taurus-java-helpers-{version}.jar"
super(TaurusJavaHelper, self).__init__("TaurusJavaHelper", tool_path, tool_file)
| true | true |
1c383228303b44befe4c767af4340793fe11954c | 7,109 | py | Python | chemfiles/trajectory.py | chemfiles/Chemharp.py | 45b8a02b7a0f07d6dcafa52db39df6a39f6f496c | [
"BSD-3-Clause"
] | 2 | 2019-04-17T11:13:13.000Z | 2021-04-28T20:34:49.000Z | chemfiles/trajectory.py | chemfiles/Chemharp.py | 45b8a02b7a0f07d6dcafa52db39df6a39f6f496c | [
"BSD-3-Clause"
] | 15 | 2016-02-19T21:51:33.000Z | 2021-07-21T09:01:52.000Z | chemfiles/trajectory.py | chemfiles/Chemharp.py | 45b8a02b7a0f07d6dcafa52db39df6a39f6f496c | [
"BSD-3-Clause"
] | 3 | 2020-06-16T08:41:24.000Z | 2021-07-22T14:51:33.000Z | # -*- coding=utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
from ctypes import c_uint64, c_char_p
import sys
from .utils import CxxPointer, _call_with_growing_buffer
from .frame import Frame, Topology
from .misc import ChemfilesError
# Python 2 compatibility
if sys.hexversion >= 0x03000000:
unicode_string = str
bytes_string = bytes
else:
unicode_string = unicode # noqa
bytes_string = str
class BaseTrajectory(CxxPointer):
def __init__(self, ptr):
self.__closed = False
super(BaseTrajectory, self).__init__(ptr, is_const=False)
def __check_opened(self):
if self.__closed:
raise ChemfilesError("Can not use a closed Trajectory")
def __del__(self):
if not self.__closed:
self.close()
def __enter__(self):
self.__check_opened()
return self
def __exit__(self, *args):
self.close()
def __iter__(self):
self.__check_opened()
for step in range(self.nsteps):
yield self.read_step(step)
def read(self):
"""
Read the next step of this :py:class:`Trajectory` and return the
corresponding :py:class:`Frame`.
"""
self.__check_opened()
frame = Frame()
self.ffi.chfl_trajectory_read(self.mut_ptr, frame.mut_ptr)
return frame
def read_step(self, step):
"""
Read a specific ``step`` in this :py:class:`Trajectory` and return the
corresponding :py:class:`Frame`.
"""
self.__check_opened()
frame = Frame()
self.ffi.chfl_trajectory_read_step(self.mut_ptr, c_uint64(step), frame.mut_ptr)
return frame
def write(self, frame):
"""Write a :py:class:`Frame` to this :py:class:`Trajectory`."""
self.__check_opened()
self.ffi.chfl_trajectory_write(self.mut_ptr, frame.ptr)
def set_topology(self, topology, format=""):
"""
Set the :py:class:`Topology` associated with this :py:class:`Trajectory`.
The new topology will be used when reading and writing the files,
replacing any topology in the frames or files.
If the ``topology`` parameter is a :py:class:`Topology` instance, it is
used directly. If the ``topology`` parameter is a string, the first
:py:class:`Frame` of the corresponding file is read, and the topology of
this frame is used.
When reading from a file, if ``format`` is not the empty string, it is
used as the file format instead of guessing it from the file extension.
"""
self.__check_opened()
if isinstance(topology, Topology):
self.ffi.chfl_trajectory_set_topology(self.mut_ptr, topology.ptr)
else:
self.ffi.chfl_trajectory_topology_file(
self.mut_ptr, topology.encode("utf8"), format.encode("utf8")
)
def set_cell(self, cell):
"""
Set the :py:class:`UnitCell` associated with this :py:class:`Trajectory`
to a copy of ``cell``.
This :py:class:`UnitCell` will be used when reading and writing the
files, replacing any unit cell in the frames or files.
"""
self.__check_opened()
self.ffi.chfl_trajectory_set_cell(self.mut_ptr, cell.ptr)
@property
def nsteps(self):
"""Get the current number of steps in this :py:class:`Trajectory`."""
self.__check_opened()
nsteps = c_uint64()
self.ffi.chfl_trajectory_nsteps(self.mut_ptr, nsteps)
return nsteps.value
@property
def path(self):
"""Get the path used to open this :py:class:`Trajectory`."""
self.__check_opened()
return _call_with_growing_buffer(
lambda buffer, size: self.ffi.chfl_trajectory_path(self.ptr, buffer, size),
initial=256,
)
def close(self):
"""
Close this :py:class:`Trajectory` and write any buffered content to the
file.
"""
self.__check_opened()
self.__closed = True
self.ffi.chfl_trajectory_close(self.ptr)
class Trajectory(BaseTrajectory):
"""
A :py:class:`Trajectory` represent a physical file from which we can read
:py:class:`Frame`.
"""
def __init__(self, path, mode="r", format=""):
"""
Open the file at the given ``path`` using the given ``mode`` and
optional file ``format``.
Valid modes are ``'r'`` for read, ``'w'`` for write and ``'a'`` for
append.
The ``format`` parameter is needed when the file format does not match
the extension, or when there is not standard extension for this format.
If `format` is an empty string, the format will be guessed from the
file extension.
"""
ptr = self.ffi.chfl_trajectory_with_format(
path.encode("utf8"), mode.encode("utf8"), format.encode("utf8")
)
# Store mode and format for __repr__
self.__mode = mode
self.__format = format
super(Trajectory, self).__init__(ptr)
def __repr__(self):
return "Trajectory('{}', '{}', '{}')".format(
self.path, self.__mode, self.__format
)
class MemoryTrajectory(BaseTrajectory):
"""
A :py:class:`MemoryTrajectory` allow to read/write in-memory data as though
it was a formatted file.
"""
def __init__(self, data="", mode="r", format=""):
"""
The ``format`` parameter is always required.
When reading (``mode`` is ``'r'``), the ``data`` parameter will be used
as the formatted file.
When writing (``mode`` is ``'w'``), the ``data`` parameter is ignored.
To get the memory buffer containing everything already written, use the
:py:func:`buffer` function.
"""
if not format:
raise ChemfilesError(
"'format' is required when creating a MemoryTrajectory"
)
if mode == "r":
if isinstance(data, unicode_string):
data = data.encode("utf8")
elif not isinstance(data, bytes_string):
raise ChemfilesError("the 'data' parameter must be a string")
ptr = self.ffi.chfl_trajectory_memory_reader(
data, len(data), format.encode("utf8")
)
elif mode == "w":
ptr = self.ffi.chfl_trajectory_memory_writer(format.encode("utf8"))
else:
raise ChemfilesError(
"invalid mode '{}' passed to MemoryTrajectory".format(mode)
)
super(MemoryTrajectory, self).__init__(ptr)
def __repr__(self):
return "MemoryTrajectory({}', '{}')".format(self.__mode, self.__format)
def buffer(self):
"""
Get the data written to this in-memory trajectory. This is not valid to
call when reading in-memory data.
"""
buffer = c_char_p()
size = c_uint64()
self.ffi.chfl_trajectory_memory_buffer(self.ptr, buffer, size)
return buffer.value
| 32.760369 | 87 | 0.60965 |
from __future__ import absolute_import, print_function, unicode_literals
from ctypes import c_uint64, c_char_p
import sys
from .utils import CxxPointer, _call_with_growing_buffer
from .frame import Frame, Topology
from .misc import ChemfilesError
if sys.hexversion >= 0x03000000:
unicode_string = str
bytes_string = bytes
else:
unicode_string = unicode
bytes_string = str
class BaseTrajectory(CxxPointer):
def __init__(self, ptr):
self.__closed = False
super(BaseTrajectory, self).__init__(ptr, is_const=False)
def __check_opened(self):
if self.__closed:
raise ChemfilesError("Can not use a closed Trajectory")
def __del__(self):
if not self.__closed:
self.close()
def __enter__(self):
self.__check_opened()
return self
def __exit__(self, *args):
self.close()
def __iter__(self):
self.__check_opened()
for step in range(self.nsteps):
yield self.read_step(step)
def read(self):
self.__check_opened()
frame = Frame()
self.ffi.chfl_trajectory_read(self.mut_ptr, frame.mut_ptr)
return frame
def read_step(self, step):
self.__check_opened()
frame = Frame()
self.ffi.chfl_trajectory_read_step(self.mut_ptr, c_uint64(step), frame.mut_ptr)
return frame
def write(self, frame):
self.__check_opened()
self.ffi.chfl_trajectory_write(self.mut_ptr, frame.ptr)
def set_topology(self, topology, format=""):
self.__check_opened()
if isinstance(topology, Topology):
self.ffi.chfl_trajectory_set_topology(self.mut_ptr, topology.ptr)
else:
self.ffi.chfl_trajectory_topology_file(
self.mut_ptr, topology.encode("utf8"), format.encode("utf8")
)
def set_cell(self, cell):
self.__check_opened()
self.ffi.chfl_trajectory_set_cell(self.mut_ptr, cell.ptr)
@property
def nsteps(self):
self.__check_opened()
nsteps = c_uint64()
self.ffi.chfl_trajectory_nsteps(self.mut_ptr, nsteps)
return nsteps.value
@property
def path(self):
self.__check_opened()
return _call_with_growing_buffer(
lambda buffer, size: self.ffi.chfl_trajectory_path(self.ptr, buffer, size),
initial=256,
)
def close(self):
self.__check_opened()
self.__closed = True
self.ffi.chfl_trajectory_close(self.ptr)
class Trajectory(BaseTrajectory):
def __init__(self, path, mode="r", format=""):
ptr = self.ffi.chfl_trajectory_with_format(
path.encode("utf8"), mode.encode("utf8"), format.encode("utf8")
)
self.__mode = mode
self.__format = format
super(Trajectory, self).__init__(ptr)
def __repr__(self):
return "Trajectory('{}', '{}', '{}')".format(
self.path, self.__mode, self.__format
)
class MemoryTrajectory(BaseTrajectory):
def __init__(self, data="", mode="r", format=""):
if not format:
raise ChemfilesError(
"'format' is required when creating a MemoryTrajectory"
)
if mode == "r":
if isinstance(data, unicode_string):
data = data.encode("utf8")
elif not isinstance(data, bytes_string):
raise ChemfilesError("the 'data' parameter must be a string")
ptr = self.ffi.chfl_trajectory_memory_reader(
data, len(data), format.encode("utf8")
)
elif mode == "w":
ptr = self.ffi.chfl_trajectory_memory_writer(format.encode("utf8"))
else:
raise ChemfilesError(
"invalid mode '{}' passed to MemoryTrajectory".format(mode)
)
super(MemoryTrajectory, self).__init__(ptr)
def __repr__(self):
return "MemoryTrajectory({}', '{}')".format(self.__mode, self.__format)
def buffer(self):
buffer = c_char_p()
size = c_uint64()
self.ffi.chfl_trajectory_memory_buffer(self.ptr, buffer, size)
return buffer.value
| true | true |
1c38332477ade3863d6275a24700253942626083 | 29,157 | py | Python | core/domain/collection_domain_test.py | alexewu/oppia | 57c3c660ab7974835ec068d7c7f5ce5b5f1f25ae | [
"Apache-2.0"
] | null | null | null | core/domain/collection_domain_test.py | alexewu/oppia | 57c3c660ab7974835ec068d7c7f5ce5b5f1f25ae | [
"Apache-2.0"
] | null | null | null | core/domain/collection_domain_test.py | alexewu/oppia | 57c3c660ab7974835ec068d7c7f5ce5b5f1f25ae | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#
# Copyright 2015 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for collection domain objects and methods defined on them."""
from core.domain import collection_domain
from core.domain import collection_services
from core.tests import test_utils
import feconf
import utils
# Dictionary-like data structures within sample YAML must be formatted
# alphabetically to match string equivalence with the YAML generation
# methods tested below.
#
# If evaluating differences in YAML, conversion to dict form via
# utils.dict_from_yaml can isolate differences quickly.
SAMPLE_YAML_CONTENT = ("""category: A category
language_code: en
nodes:
- exploration_id: an_exploration_id
objective: An objective
schema_version: %d
tags: []
title: A title
""") % (feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
class CollectionChangeTests(test_utils.GenericTestBase):
def test_collection_change_object_with_invalid_change_dict(self):
# Raises exception as 'cmd' command is not found in change_dict.
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({'invalid_cmd': 'data'})
# Raises exception due to invalid property name.
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({
'cmd': 'edit_collection_property',
'property_name': 'invalid_property_name',
})
# Raises exception due to invalid command.
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({
'cmd': 'invalid_cmd'
})
def test_collection_change_object_with_swap_nodes(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'swap_nodes',
'first_index': 'first_index',
'second_index': 'second_index'
})
self.assertEqual(col_change_object.first_index, 'first_index')
self.assertEqual(col_change_object.second_index, 'second_index')
def test_collection_change_object_with_edit_collection_node_property(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'edit_collection_node_property',
'exploration_id': 'exploration_id',
'property_name': 'property_name',
'new_value': 'new_value',
'old_value': 'old_value'
})
self.assertEqual(col_change_object.exploration_id, 'exploration_id')
self.assertEqual(col_change_object.property_name, 'property_name')
self.assertEqual(col_change_object.new_value, 'new_value')
self.assertEqual(col_change_object.old_value, 'old_value')
def test_collection_change_object_with_add_collection_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'add_collection_skill',
'name': 'name'
})
self.assertEqual(col_change_object.name, 'name')
def test_collection_change_object_with_add_question_id_to_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'add_question_id_to_skill',
'skill_id': 'skill_id',
'question_id': 'question_id'
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
self.assertEqual(col_change_object.question_id, 'question_id')
def test_collection_change_object_with_remove_question_id_from_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'remove_question_id_from_skill',
'skill_id': 'skill_id',
'question_id': 'question_id'
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
self.assertEqual(col_change_object.question_id, 'question_id')
def test_collection_change_object_with_delete_collection_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'delete_collection_skill',
'skill_id': 'skill_id',
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
class CollectionDomainUnitTests(test_utils.GenericTestBase):
"""Test the collection domain object."""
COLLECTION_ID = 'collection_id'
EXPLORATION_ID = 'exp_id_0'
def setUp(self):
super(CollectionDomainUnitTests, self).setUp()
self.save_new_valid_collection(
self.COLLECTION_ID, 'user@example.com', title='Title',
category='Category', objective='Objective',
exploration_id=self.EXPLORATION_ID)
self.collection = collection_services.get_collection_by_id(
self.COLLECTION_ID)
def _assert_validation_error(self, expected_error_substring):
"""Checks that the collection passes strict validation."""
with self.assertRaisesRegexp(
utils.ValidationError, expected_error_substring):
self.collection.validate()
def test_initial_validation(self):
"""Test validating a new, valid collection."""
self.collection.validate()
def test_title_validation(self):
self.collection.title = 0
self._assert_validation_error('Expected title to be a string')
def test_category_validation(self):
self.collection.category = 0
self._assert_validation_error('Expected category to be a string')
def test_objective_validation(self):
self.collection.objective = ''
self._assert_validation_error('objective must be specified')
self.collection.objective = 0
self._assert_validation_error('Expected objective to be a string')
def test_language_code_validation(self):
self.collection.language_code = ''
self._assert_validation_error('language must be specified')
self.collection.language_code = 0
self._assert_validation_error('Expected language code to be a string')
self.collection.language_code = 'xz'
self._assert_validation_error('Invalid language code')
def test_tags_validation(self):
self.collection.tags = 'abc'
self._assert_validation_error('Expected tags to be a list')
self.collection.tags = [2, 3]
self._assert_validation_error('Expected each tag to be a string')
self.collection.tags = ['', 'tag']
self._assert_validation_error('Tags should be non-empty')
self.collection.tags = ['234']
self._assert_validation_error(
'Tags should only contain lowercase letters and spaces')
self.collection.tags = [' abc']
self._assert_validation_error(
'Tags should not start or end with whitespace')
self.collection.tags = ['abc def']
self._assert_validation_error(
'Adjacent whitespace in tags should be collapsed')
self.collection.tags = ['abc', 'abc']
self._assert_validation_error(
'Expected tags to be unique, but found duplicates')
def test_schema_version_validation(self):
self.collection.schema_version = 'some_schema_version'
self._assert_validation_error('Expected schema version to be an int')
self.collection.schema_version = 100
self._assert_validation_error(
'Expected schema version to be %s' %
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
def test_nodes_validation(self):
self.collection.nodes = {}
self._assert_validation_error('Expected nodes to be a list')
self.collection.nodes = [
collection_domain.CollectionNode.from_dict({
'exploration_id': '0'
}),
collection_domain.CollectionNode.from_dict({
'exploration_id': '0'
})
]
self._assert_validation_error(
'There are explorations referenced in the collection more than '
'once.')
def test_initial_explorations_validation(self):
# Having no collection nodes is fine for non-strict validation.
self.collection.nodes = []
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'Expected to have at least 1 exploration in the collection.')
def test_metadata_validation(self):
self.collection.title = ''
self.collection.objective = ''
self.collection.category = ''
self.collection.nodes = []
# Having no title is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A title must be specified for the collection.')
self.collection.title = 'A title'
# Having no objective is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'An objective must be specified for the collection.')
self.collection.objective = 'An objective'
# Having no category is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A category must be specified for the collection.')
self.collection.category = 'A category'
# Having no exploration is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'Expected to have at least 1 exploration in the collection.')
self.collection.add_node('exp_id_1')
# Now the collection passes both strict and non-strict validation.
self.collection.validate(strict=False)
self.collection.validate(strict=True)
def test_collection_node_exploration_id_validation(self):
# Validate CollectionNode's exploration_id.
collection_node0 = self.collection.get_node('exp_id_0')
collection_node0.exploration_id = 2
self._assert_validation_error('Expected exploration ID to be a string')
def test_is_demo_property(self):
"""Test the is_demo property."""
demo = collection_domain.Collection.create_default_collection('0')
self.assertEqual(demo.is_demo, True)
notdemo1 = collection_domain.Collection.create_default_collection('a')
self.assertEqual(notdemo1.is_demo, False)
notdemo2 = collection_domain.Collection.create_default_collection(
'abcd')
self.assertEqual(notdemo2.is_demo, False)
def test_collection_export_import(self):
"""Test that to_dict and from_dict preserve all data within an
collection.
"""
self.save_new_valid_exploration(
'0', 'user@example.com', end_state_name='End')
collection = collection_domain.Collection.create_default_collection(
'0', title='title', category='category', objective='objective')
collection_dict = collection.to_dict()
collection_from_dict = collection_domain.Collection.from_dict(
collection_dict)
self.assertEqual(collection_from_dict.to_dict(), collection_dict)
    def test_add_delete_swap_nodes(self):
        """Test that add_node, delete_node and swap_nodes fail in the correct
        situations.
        """
        collection = collection_domain.Collection.create_default_collection(
            '0')
        self.assertEqual(len(collection.nodes), 0)

        # A fresh exploration id can be added once...
        collection.add_node('test_exp')
        self.assertEqual(len(collection.nodes), 1)

        # ...but adding the same exploration a second time is an error.
        with self.assertRaisesRegexp(
            ValueError,
            'Exploration is already part of this collection: test_exp'
            ):
            collection.add_node('test_exp')

        collection.add_node('another_exp')
        self.assertEqual(len(collection.nodes), 2)

        # Swapping two distinct indices reorders the node list in place.
        collection.swap_nodes(0, 1)
        self.assertEqual(collection.nodes[0].exploration_id, 'another_exp')
        self.assertEqual(collection.nodes[1].exploration_id, 'test_exp')

        # Swapping a node with itself is rejected.
        with self.assertRaisesRegexp(
            ValueError,
            'Both indices point to the same collection node.'
            ):
            collection.swap_nodes(0, 0)

        collection.delete_node('another_exp')
        self.assertEqual(len(collection.nodes), 1)

        # Deleting an exploration that is no longer present is an error.
        with self.assertRaisesRegexp(
            ValueError,
            'Exploration is not part of this collection: another_exp'
            ):
            collection.delete_node('another_exp')

        collection.delete_node('test_exp')
        self.assertEqual(len(collection.nodes), 0)
def test_update_collection_contents_from_model(self):
versioned_collection_contents = {
'schema_version': 1,
'collection_contents': {}
}
collection_domain.Collection.update_collection_contents_from_model(
versioned_collection_contents, 1)
self.assertEqual(versioned_collection_contents['schema_version'], 2)
self.assertEqual(
versioned_collection_contents['collection_contents'], {})
def test_update_collection_contents_from_model_with_invalid_schema_version(
self):
versioned_collection_contents = {
'schema_version': feconf.CURRENT_COLLECTION_SCHEMA_VERSION,
'collection_contents': {}
}
with self.assertRaisesRegexp(
Exception,
'Collection is version .+ but current collection schema version '
'is %d' % feconf.CURRENT_COLLECTION_SCHEMA_VERSION):
collection_domain.Collection.update_collection_contents_from_model(
versioned_collection_contents,
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
class ExplorationGraphUnitTests(test_utils.GenericTestBase):
    """Test the general structure of explorations within a collection."""

    def test_initial_explorations(self):
        """Any exploration without prerequisites should be an initial
        exploration.
        """
        collection = collection_domain.Collection.create_default_collection(
            'collection_id')

        # If there are no explorations in the collection, there can be no
        # initial explorations.
        self.assertEqual(collection.nodes, [])
        self.assertEqual(collection.first_exploration_id, None)

        # A freshly added exploration will be an initial one.
        collection.add_node('exp_id_0')
        self.assertEqual(collection.first_exploration_id, 'exp_id_0')

        # The first added node remains the initial exploration even after
        # more nodes are appended.
        collection.add_node('exp_id_1')
        self.assertEqual(len(collection.nodes), 2)
        self.assertEqual(collection.first_exploration_id, 'exp_id_0')

    def test_next_explorations(self):
        """Explorations should be suggested based on their index in the node
        list.
        """
        collection = collection_domain.Collection.create_default_collection(
            'collection_id')

        # There should be no next explorations for an empty collection.
        self.assertEqual(collection.get_next_exploration_id([]), None)

        # If a new exploration is added, the next exploration IDs should be the
        # same as the initial exploration.
        collection.add_node('exp_id_0')
        self.assertEqual(collection.get_next_exploration_id([]), 'exp_id_0')
        self.assertEqual(
            collection.first_exploration_id,
            collection.get_next_exploration_id([]))

        # Completing the only exploration of the collection should lead to no
        # available explorations thereafter.
        self.assertEqual(
            collection.get_next_exploration_id(['exp_id_0']), None)

        # If another exploration has been added, then the first exploration
        # should be the next one to complete.
        collection.add_node('exp_id_1')
        self.assertEqual(collection.get_next_exploration_id(
            ['exp_id_0']), 'exp_id_1')

        # If another exploration is added, then based on explorations
        # completed, the correct exploration should be shown as the next one.
        collection.add_node('exp_id_2')
        self.assertEqual(
            collection.get_next_exploration_id([]), 'exp_id_0')
        self.assertEqual(
            collection.get_next_exploration_id(['exp_id_0']), 'exp_id_1')
        self.assertEqual(
            collection.get_next_exploration_id(['exp_id_0', 'exp_id_1']),
            'exp_id_2')

        # If all explorations have been completed, none should be suggested.
        self.assertEqual(
            collection.get_next_exploration_id(
                ['exp_id_0', 'exp_id_1', 'exp_id_2']), None)

    def test_next_explorations_in_sequence(self):
        """get_next_exploration_id_in_sequence() returns the node directly
        after the given exploration, or None for the last node.
        """
        collection = collection_domain.Collection.create_default_collection(
            'collection_id')
        exploration_id = 'exp_id_0'
        collection.add_node(exploration_id)

        # Completing the only exploration of the collection should lead to no
        # available explorations thereafter.
        self.assertEqual(
            collection.get_next_exploration_id_in_sequence(exploration_id),
            None)

        # Once more nodes exist, each exploration's successor is simply the
        # next node in the list.
        collection.add_node('exp_id_1')
        collection.add_node('exp_id_2')
        self.assertEqual(
            collection.get_next_exploration_id_in_sequence(exploration_id),
            'exp_id_1')
        self.assertEqual(
            collection.get_next_exploration_id_in_sequence('exp_id_1'),
            'exp_id_2')

    def test_nodes_are_in_playble_order(self):
        """Nodes are returned in the order in which they were added.

        NOTE(review): 'playble' in this test's name is a typo for
        'playable'; kept as-is to avoid renaming the test method.
        """
        # Create collection.
        collection = collection_domain.Collection.create_default_collection(
            'collection_id')

        # There should be an empty node list in playable order for an empty
        # collection.
        self.assertEqual(collection.nodes, [])

        # Add nodes to collection.
        collection.add_node('exp_id_0')
        collection.add_node('exp_id_1')
        collection.add_node('exp_id_2')

        # The observed node order must match the insertion order.
        sorted_nodes = collection.nodes
        expected_explorations_ids = ['exp_id_0', 'exp_id_1', 'exp_id_2']
        observed_exploration_ids = [
            node.exploration_id for node in sorted_nodes]
        self.assertEqual(expected_explorations_ids, observed_exploration_ids)

    def test_next_explorations_with_invalid_exploration_ids(self):
        """Unknown exploration ids in the completed list are ignored."""
        collection = collection_domain.Collection.create_default_collection(
            'collection_id')
        collection.add_node('exp_id_1')

        # There should be one suggested exploration to complete by default.
        self.assertEqual(collection.get_next_exploration_id([]), 'exp_id_1')

        # If an invalid exploration ID is passed to get_next_exploration_id(),
        # it should be ignored. This tests the situation where an exploration
        # is deleted from a collection after being completed by a user.
        self.assertEqual(
            collection.get_next_exploration_id(['fake_exp_id']), 'exp_id_1')
class YamlCreationUnitTests(test_utils.GenericTestBase):
    """Test creation of collections from YAML files."""

    COLLECTION_ID = 'a_collection_id'
    EXPLORATION_ID = 'an_exploration_id'

    def test_yaml_import_and_export(self):
        """Test the from_yaml() and to_yaml() methods."""
        self.save_new_valid_exploration(
            self.EXPLORATION_ID, 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.create_default_collection(
            self.COLLECTION_ID, title='A title', category='A category',
            objective='An objective')
        collection.add_node(self.EXPLORATION_ID)
        self.assertEqual(len(collection.nodes), 1)
        collection.validate()

        # The exported YAML must match the module-level fixture exactly.
        yaml_content = collection.to_yaml()
        self.assertEqual(yaml_content, SAMPLE_YAML_CONTENT)

        # Round trip: re-importing the exported YAML yields an equivalent
        # collection whose own export matches the original YAML.
        collection2 = collection_domain.Collection.from_yaml(
            'collection2', yaml_content)
        self.assertEqual(len(collection2.nodes), 1)
        yaml_content_2 = collection2.to_yaml()
        self.assertEqual(yaml_content_2, yaml_content)

        # Should not be able to create a collection from no YAML content.
        with self.assertRaises(Exception):
            collection_domain.Collection.from_yaml('collection3', None)

    def test_from_yaml_with_no_schema_version_specified_raises_error(self):
        """from_yaml() rejects serialized collections whose schema version
        is None.
        """
        collection = collection_domain.Collection(
            self.COLLECTION_ID, 'title', 'category', 'objective', 'en', [],
            None, [], 0)

        yaml_content = collection.to_yaml()
        with self.assertRaisesRegexp(
            Exception, 'Invalid YAML file: no schema version specified.'):
            collection_domain.Collection.from_yaml(
                self.COLLECTION_ID, yaml_content)

    def test_from_yaml_with_invalid_schema_version_raises_error(self):
        """from_yaml() rejects schema versions outside the supported range
        (version 0 here).
        """
        collection = collection_domain.Collection(
            self.COLLECTION_ID, 'title', 'category', 'objective', 'en', [],
            0, [], 0)

        yaml_content = collection.to_yaml()
        with self.assertRaisesRegexp(
            Exception,
            'Sorry, we can only process v1 to .+ collection YAML files at '
            'present.'):
            collection_domain.Collection.from_yaml(
                self.COLLECTION_ID, yaml_content)
class SchemaMigrationMethodsUnitTests(test_utils.GenericTestBase):
    """Tests the presence of appropriate schema migration methods in the
    Collection domain object class.
    """

    def test_correct_collection_contents_schema_conversion_methods_exist(self):
        """A contents converter exists for every schema version below the
        current one, and none exists beyond it.
        """
        latest = feconf.CURRENT_COLLECTION_SCHEMA_VERSION
        method_template = '_convert_collection_contents_v%s_dict_to_v%s_dict'

        # Every v(n) -> v(n+1) contents converter up to the latest version
        # must be defined on the Collection class.
        for old_version in range(1, latest):
            self.assertTrue(hasattr(
                collection_domain.Collection,
                method_template % (old_version, old_version + 1)))

        # There must be no converter past the latest schema version.
        self.assertFalse(hasattr(
            collection_domain.Collection,
            method_template % (latest, latest + 1)))

    def test_correct_collection_schema_conversion_methods_exist(self):
        """A collection-dict converter exists for every schema version below
        the current one, and none exists beyond it.
        """
        latest = feconf.CURRENT_COLLECTION_SCHEMA_VERSION
        method_template = '_convert_v%s_dict_to_v%s_dict'

        # Every v(n) -> v(n+1) converter up to the latest version must be
        # defined on the Collection class.
        for old_version in range(1, latest):
            self.assertTrue(hasattr(
                collection_domain.Collection,
                method_template % (old_version, old_version + 1)))

        # There must be no converter past the latest schema version.
        self.assertFalse(hasattr(
            collection_domain.Collection,
            method_template % (latest, latest + 1)))
class SchemaMigrationUnitTests(test_utils.GenericTestBase):
    """Test migration methods for yaml content."""

    # Schema v1: nodes carry skill names directly; no language_code/tags.
    YAML_CONTENT_V1 = ("""category: A category
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 1
title: A title
""")

    # Schema v2 adds the language_code and tags fields.
    YAML_CONTENT_V2 = ("""category: A category
language_code: en
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 2
tags: []
title: A title
""")

    # NOTE(review): this constant is named V3 but its body still reads
    # 'schema_version: 2' and is identical to YAML_CONTENT_V2, so
    # test_load_from_v3 re-exercises the v2 migration path. Confirm whether
    # the embedded version number should be 3.
    YAML_CONTENT_V3 = ("""category: A category
language_code: en
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 2
tags: []
title: A title
""")

    # Schema v4 introduces a skills table keyed by generated skill ids
    # (nodes refer to those ids) plus a next_skill_id counter.
    YAML_CONTENT_V4 = ("""category: A category
language_code: en
next_skill_id: 2
nodes:
- acquired_skill_ids:
  - skill0
  - skill1
  exploration_id: Exp1
  prerequisite_skill_ids: []
- acquired_skill_ids: []
  exploration_id: Exp2
  prerequisite_skill_ids:
  - skill0
objective: ''
schema_version: 4
skills:
  skill0:
    name: Skill1
    question_ids: []
  skill1:
    name: Skill2
    question_ids: []
tags: []
title: A title
""")

    # Schema v5 renames next_skill_id to next_skill_index.
    YAML_CONTENT_V5 = ("""category: A category
language_code: en
next_skill_index: 2
nodes:
- acquired_skill_ids:
  - skill0
  - skill1
  exploration_id: Exp1
  prerequisite_skill_ids: []
- acquired_skill_ids: []
  exploration_id: Exp2
  prerequisite_skill_ids:
  - skill0
objective: ''
schema_version: 5
skills:
  skill0:
    name: Skill1
    question_ids: []
  skill1:
    name: Skill2
    question_ids: []
tags: []
title: A title
""")

    # Schema v6 drops skills from collections entirely; each node keeps only
    # its exploration_id.
    YAML_CONTENT_V6 = ("""category: A category
language_code: en
nodes:
- exploration_id: Exp1
- exploration_id: Exp2
objective: ''
schema_version: 6
tags: []
title: A title
""")

    # Every load test below migrates its fixture up to the latest schema and
    # compares against this v6 representation.
    _LATEST_YAML_CONTENT = YAML_CONTENT_V6

    def test_load_from_v1(self):
        """Test direct loading from a v1 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V1)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v2(self):
        """Test direct loading from a v2 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V2)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v3(self):
        """Test direct loading from a v3 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V3)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v4(self):
        """Test direct loading from a v4 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V4)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v5(self):
        """Test direct loading from a v5 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V5)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v6(self):
        """Test direct loading from a v6 yaml file."""
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', self.YAML_CONTENT_V6)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)
class CollectionSummaryTests(test_utils.GenericTestBase):
    """Tests for the CollectionSummary domain object."""

    def test_collection_summary_gets_created(self):
        """to_dict() reflects every constructor argument."""
        summary = collection_domain.CollectionSummary(
            'col_id', 'title', 'category', 'objective', 'en', [], 'status',
            'True', ['owner_id'], ['editor_id'], ['viewer_id'],
            ['contributor_id'], {}, 1, 1, {}, {})

        # Each key in the dict form corresponds to one constructor argument.
        expected_dict = {
            'id': 'col_id',
            'title': 'title',
            'category': 'category',
            'objective': 'objective',
            'language_code': 'en',
            'tags': [],
            'status': 'status',
            'community_owned': 'True',
            'owner_ids': ['owner_id'],
            'editor_ids': ['editor_id'],
            'viewer_ids': ['viewer_id'],
            'contributor_ids': ['contributor_id'],
            'contributors_summary': {},
            'version': 1,
            'collection_model_created_on': {},
            'collection_model_last_updated': {},
        }
        self.assertEqual(summary.to_dict(), expected_dict)
| 36.629397 | 79 | 0.673732 |
from core.domain import collection_domain
from core.domain import collection_services
from core.tests import test_utils
import feconf
import utils
SAMPLE_YAML_CONTENT = ("""category: A category
language_code: en
nodes:
- exploration_id: an_exploration_id
objective: An objective
schema_version: %d
tags: []
title: A title
""") % (feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
class CollectionChangeTests(test_utils.GenericTestBase):
def test_collection_change_object_with_invalid_change_dict(self):
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({'invalid_cmd': 'data'})
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({
'cmd': 'edit_collection_property',
'property_name': 'invalid_property_name',
})
with self.assertRaisesRegexp(Exception, 'Invalid change_dict:'):
collection_domain.CollectionChange({
'cmd': 'invalid_cmd'
})
def test_collection_change_object_with_swap_nodes(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'swap_nodes',
'first_index': 'first_index',
'second_index': 'second_index'
})
self.assertEqual(col_change_object.first_index, 'first_index')
self.assertEqual(col_change_object.second_index, 'second_index')
def test_collection_change_object_with_edit_collection_node_property(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'edit_collection_node_property',
'exploration_id': 'exploration_id',
'property_name': 'property_name',
'new_value': 'new_value',
'old_value': 'old_value'
})
self.assertEqual(col_change_object.exploration_id, 'exploration_id')
self.assertEqual(col_change_object.property_name, 'property_name')
self.assertEqual(col_change_object.new_value, 'new_value')
self.assertEqual(col_change_object.old_value, 'old_value')
def test_collection_change_object_with_add_collection_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'add_collection_skill',
'name': 'name'
})
self.assertEqual(col_change_object.name, 'name')
def test_collection_change_object_with_add_question_id_to_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'add_question_id_to_skill',
'skill_id': 'skill_id',
'question_id': 'question_id'
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
self.assertEqual(col_change_object.question_id, 'question_id')
def test_collection_change_object_with_remove_question_id_from_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'remove_question_id_from_skill',
'skill_id': 'skill_id',
'question_id': 'question_id'
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
self.assertEqual(col_change_object.question_id, 'question_id')
def test_collection_change_object_with_delete_collection_skill(self):
col_change_object = collection_domain.CollectionChange({
'cmd': 'delete_collection_skill',
'skill_id': 'skill_id',
})
self.assertEqual(col_change_object.skill_id, 'skill_id')
class CollectionDomainUnitTests(test_utils.GenericTestBase):
COLLECTION_ID = 'collection_id'
EXPLORATION_ID = 'exp_id_0'
def setUp(self):
super(CollectionDomainUnitTests, self).setUp()
self.save_new_valid_collection(
self.COLLECTION_ID, 'user@example.com', title='Title',
category='Category', objective='Objective',
exploration_id=self.EXPLORATION_ID)
self.collection = collection_services.get_collection_by_id(
self.COLLECTION_ID)
def _assert_validation_error(self, expected_error_substring):
with self.assertRaisesRegexp(
utils.ValidationError, expected_error_substring):
self.collection.validate()
def test_initial_validation(self):
self.collection.validate()
def test_title_validation(self):
self.collection.title = 0
self._assert_validation_error('Expected title to be a string')
def test_category_validation(self):
self.collection.category = 0
self._assert_validation_error('Expected category to be a string')
def test_objective_validation(self):
self.collection.objective = ''
self._assert_validation_error('objective must be specified')
self.collection.objective = 0
self._assert_validation_error('Expected objective to be a string')
def test_language_code_validation(self):
self.collection.language_code = ''
self._assert_validation_error('language must be specified')
self.collection.language_code = 0
self._assert_validation_error('Expected language code to be a string')
self.collection.language_code = 'xz'
self._assert_validation_error('Invalid language code')
def test_tags_validation(self):
self.collection.tags = 'abc'
self._assert_validation_error('Expected tags to be a list')
self.collection.tags = [2, 3]
self._assert_validation_error('Expected each tag to be a string')
self.collection.tags = ['', 'tag']
self._assert_validation_error('Tags should be non-empty')
self.collection.tags = ['234']
self._assert_validation_error(
'Tags should only contain lowercase letters and spaces')
self.collection.tags = [' abc']
self._assert_validation_error(
'Tags should not start or end with whitespace')
self.collection.tags = ['abc def']
self._assert_validation_error(
'Adjacent whitespace in tags should be collapsed')
self.collection.tags = ['abc', 'abc']
self._assert_validation_error(
'Expected tags to be unique, but found duplicates')
def test_schema_version_validation(self):
self.collection.schema_version = 'some_schema_version'
self._assert_validation_error('Expected schema version to be an int')
self.collection.schema_version = 100
self._assert_validation_error(
'Expected schema version to be %s' %
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
def test_nodes_validation(self):
self.collection.nodes = {}
self._assert_validation_error('Expected nodes to be a list')
self.collection.nodes = [
collection_domain.CollectionNode.from_dict({
'exploration_id': '0'
}),
collection_domain.CollectionNode.from_dict({
'exploration_id': '0'
})
]
self._assert_validation_error(
'There are explorations referenced in the collection more than '
'once.')
def test_initial_explorations_validation(self):
self.collection.nodes = []
self.collection.validate(strict=False)
self._assert_validation_error(
'Expected to have at least 1 exploration in the collection.')
def test_metadata_validation(self):
self.collection.title = ''
self.collection.objective = ''
self.collection.category = ''
self.collection.nodes = []
# Having no title is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A title must be specified for the collection.')
self.collection.title = 'A title'
self.collection.validate(strict=False)
self._assert_validation_error(
'An objective must be specified for the collection.')
self.collection.objective = 'An objective'
# Having no category is fine for non-strict validation.
self.collection.validate(strict=False)
# But it's not okay for strict validation.
self._assert_validation_error(
'A category must be specified for the collection.')
self.collection.category = 'A category'
self.collection.validate(strict=False)
self._assert_validation_error(
'Expected to have at least 1 exploration in the collection.')
self.collection.add_node('exp_id_1')
# Now the collection passes both strict and non-strict validation.
self.collection.validate(strict=False)
self.collection.validate(strict=True)
def test_collection_node_exploration_id_validation(self):
# Validate CollectionNode's exploration_id.
collection_node0 = self.collection.get_node('exp_id_0')
collection_node0.exploration_id = 2
self._assert_validation_error('Expected exploration ID to be a string')
def test_is_demo_property(self):
demo = collection_domain.Collection.create_default_collection('0')
self.assertEqual(demo.is_demo, True)
notdemo1 = collection_domain.Collection.create_default_collection('a')
self.assertEqual(notdemo1.is_demo, False)
notdemo2 = collection_domain.Collection.create_default_collection(
'abcd')
self.assertEqual(notdemo2.is_demo, False)
def test_collection_export_import(self):
self.save_new_valid_exploration(
'0', 'user@example.com', end_state_name='End')
collection = collection_domain.Collection.create_default_collection(
'0', title='title', category='category', objective='objective')
collection_dict = collection.to_dict()
collection_from_dict = collection_domain.Collection.from_dict(
collection_dict)
self.assertEqual(collection_from_dict.to_dict(), collection_dict)
def test_add_delete_swap_nodes(self):
collection = collection_domain.Collection.create_default_collection(
'0')
self.assertEqual(len(collection.nodes), 0)
collection.add_node('test_exp')
self.assertEqual(len(collection.nodes), 1)
with self.assertRaisesRegexp(
ValueError,
'Exploration is already part of this collection: test_exp'
):
collection.add_node('test_exp')
collection.add_node('another_exp')
self.assertEqual(len(collection.nodes), 2)
collection.swap_nodes(0, 1)
self.assertEqual(collection.nodes[0].exploration_id, 'another_exp')
self.assertEqual(collection.nodes[1].exploration_id, 'test_exp')
with self.assertRaisesRegexp(
ValueError,
'Both indices point to the same collection node.'
):
collection.swap_nodes(0, 0)
collection.delete_node('another_exp')
self.assertEqual(len(collection.nodes), 1)
with self.assertRaisesRegexp(
ValueError,
'Exploration is not part of this collection: another_exp'
):
collection.delete_node('another_exp')
collection.delete_node('test_exp')
self.assertEqual(len(collection.nodes), 0)
def test_update_collection_contents_from_model(self):
versioned_collection_contents = {
'schema_version': 1,
'collection_contents': {}
}
collection_domain.Collection.update_collection_contents_from_model(
versioned_collection_contents, 1)
self.assertEqual(versioned_collection_contents['schema_version'], 2)
self.assertEqual(
versioned_collection_contents['collection_contents'], {})
def test_update_collection_contents_from_model_with_invalid_schema_version(
self):
versioned_collection_contents = {
'schema_version': feconf.CURRENT_COLLECTION_SCHEMA_VERSION,
'collection_contents': {}
}
with self.assertRaisesRegexp(
Exception,
'Collection is version .+ but current collection schema version '
'is %d' % feconf.CURRENT_COLLECTION_SCHEMA_VERSION):
collection_domain.Collection.update_collection_contents_from_model(
versioned_collection_contents,
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
class ExplorationGraphUnitTests(test_utils.GenericTestBase):
def test_initial_explorations(self):
collection = collection_domain.Collection.create_default_collection(
'collection_id')
self.assertEqual(collection.nodes, [])
self.assertEqual(collection.first_exploration_id, None)
collection.add_node('exp_id_0')
self.assertEqual(collection.first_exploration_id, 'exp_id_0')
collection.add_node('exp_id_1')
self.assertEqual(len(collection.nodes), 2)
self.assertEqual(collection.first_exploration_id, 'exp_id_0')
def test_next_explorations(self):
collection = collection_domain.Collection.create_default_collection(
'collection_id')
self.assertEqual(collection.get_next_exploration_id([]), None)
collection.add_node('exp_id_0')
self.assertEqual(collection.get_next_exploration_id([]), 'exp_id_0')
self.assertEqual(
collection.first_exploration_id,
collection.get_next_exploration_id([]))
self.assertEqual(
collection.get_next_exploration_id(['exp_id_0']), None)
collection.add_node('exp_id_1')
self.assertEqual(collection.get_next_exploration_id(
['exp_id_0']), 'exp_id_1')
collection.add_node('exp_id_2')
self.assertEqual(
collection.get_next_exploration_id([]), 'exp_id_0')
self.assertEqual(
collection.get_next_exploration_id(['exp_id_0']), 'exp_id_1')
self.assertEqual(
collection.get_next_exploration_id(['exp_id_0', 'exp_id_1']),
'exp_id_2')
self.assertEqual(
collection.get_next_exploration_id(
['exp_id_0', 'exp_id_1', 'exp_id_2']), None)
def test_next_explorations_in_sequence(self):
collection = collection_domain.Collection.create_default_collection(
'collection_id')
exploration_id = 'exp_id_0'
collection.add_node(exploration_id)
self.assertEqual(
collection.get_next_exploration_id_in_sequence(exploration_id),
None)
collection.add_node('exp_id_1')
collection.add_node('exp_id_2')
self.assertEqual(
collection.get_next_exploration_id_in_sequence(exploration_id),
'exp_id_1')
self.assertEqual(
collection.get_next_exploration_id_in_sequence('exp_id_1'),
'exp_id_2')
def test_nodes_are_in_playble_order(self):
collection = collection_domain.Collection.create_default_collection(
'collection_id')
self.assertEqual(collection.nodes, [])
collection.add_node('exp_id_0')
collection.add_node('exp_id_1')
collection.add_node('exp_id_2')
sorted_nodes = collection.nodes
expected_explorations_ids = ['exp_id_0', 'exp_id_1', 'exp_id_2']
observed_exploration_ids = [
node.exploration_id for node in sorted_nodes]
self.assertEqual(expected_explorations_ids, observed_exploration_ids)
def test_next_explorations_with_invalid_exploration_ids(self):
collection = collection_domain.Collection.create_default_collection(
'collection_id')
collection.add_node('exp_id_1')
self.assertEqual(collection.get_next_exploration_id([]), 'exp_id_1')
self.assertEqual(
collection.get_next_exploration_id(['fake_exp_id']), 'exp_id_1')
class YamlCreationUnitTests(test_utils.GenericTestBase):
COLLECTION_ID = 'a_collection_id'
EXPLORATION_ID = 'an_exploration_id'
def test_yaml_import_and_export(self):
self.save_new_valid_exploration(
self.EXPLORATION_ID, 'user@example.com', end_state_name='End')
collection = collection_domain.Collection.create_default_collection(
self.COLLECTION_ID, title='A title', category='A category',
objective='An objective')
collection.add_node(self.EXPLORATION_ID)
self.assertEqual(len(collection.nodes), 1)
collection.validate()
yaml_content = collection.to_yaml()
self.assertEqual(yaml_content, SAMPLE_YAML_CONTENT)
collection2 = collection_domain.Collection.from_yaml(
'collection2', yaml_content)
self.assertEqual(len(collection2.nodes), 1)
yaml_content_2 = collection2.to_yaml()
self.assertEqual(yaml_content_2, yaml_content)
with self.assertRaises(Exception):
collection_domain.Collection.from_yaml('collection3', None)
def test_from_yaml_with_no_schema_version_specified_raises_error(self):
collection = collection_domain.Collection(
self.COLLECTION_ID, 'title', 'category', 'objective', 'en', [],
None, [], 0)
yaml_content = collection.to_yaml()
with self.assertRaisesRegexp(
Exception, 'Invalid YAML file: no schema version specified.'):
collection_domain.Collection.from_yaml(
self.COLLECTION_ID, yaml_content)
def test_from_yaml_with_invalid_schema_version_raises_error(self):
collection = collection_domain.Collection(
self.COLLECTION_ID, 'title', 'category', 'objective', 'en', [],
0, [], 0)
yaml_content = collection.to_yaml()
with self.assertRaisesRegexp(
Exception,
'Sorry, we can only process v1 to .+ collection YAML files at '
'present.'):
collection_domain.Collection.from_yaml(
self.COLLECTION_ID, yaml_content)
class SchemaMigrationMethodsUnitTests(test_utils.GenericTestBase):
def test_correct_collection_contents_schema_conversion_methods_exist(self):
current_collection_schema_version = (
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
for version_num in range(1, current_collection_schema_version):
self.assertTrue(hasattr(
collection_domain.Collection,
'_convert_collection_contents_v%s_dict_to_v%s_dict' % (
version_num, version_num + 1)))
self.assertFalse(hasattr(
collection_domain.Collection,
'_convert_collection_contents_v%s_dict_to_v%s_dict' % (
current_collection_schema_version,
current_collection_schema_version + 1)))
def test_correct_collection_schema_conversion_methods_exist(self):
current_collection_schema_version = (
feconf.CURRENT_COLLECTION_SCHEMA_VERSION)
for version_num in range(1, current_collection_schema_version):
self.assertTrue(hasattr(
collection_domain.Collection,
'_convert_v%s_dict_to_v%s_dict' % (
version_num, version_num + 1)))
self.assertFalse(hasattr(
collection_domain.Collection,
'_convert_v%s_dict_to_v%s_dict' % (
current_collection_schema_version,
current_collection_schema_version + 1)))
class SchemaMigrationUnitTests(test_utils.GenericTestBase):
    """Tests that collection YAML at every historical schema version is
    migrated to the latest schema on load.

    Each YAML_CONTENT_Vn constant is the same collection serialized at
    schema version n; loading any of them must round-trip to the latest
    serialized form.
    """

    YAML_CONTENT_V1 = ("""category: A category
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 1
title: A title
""")
    YAML_CONTENT_V2 = ("""category: A category
language_code: en
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 2
tags: []
title: A title
""")
    YAML_CONTENT_V3 = ("""category: A category
language_code: en
nodes:
- acquired_skills:
  - Skill1
  - Skill2
  exploration_id: Exp1
  prerequisite_skills: []
- acquired_skills: []
  exploration_id: Exp2
  prerequisite_skills:
  - Skill1
objective: ''
schema_version: 2
tags: []
title: A title
""")
    YAML_CONTENT_V4 = ("""category: A category
language_code: en
next_skill_id: 2
nodes:
- acquired_skill_ids:
  - skill0
  - skill1
  exploration_id: Exp1
  prerequisite_skill_ids: []
- acquired_skill_ids: []
  exploration_id: Exp2
  prerequisite_skill_ids:
  - skill0
objective: ''
schema_version: 4
skills:
  skill0:
    name: Skill1
    question_ids: []
  skill1:
    name: Skill2
    question_ids: []
tags: []
title: A title
""")
    YAML_CONTENT_V5 = ("""category: A category
language_code: en
next_skill_index: 2
nodes:
- acquired_skill_ids:
  - skill0
  - skill1
  exploration_id: Exp1
  prerequisite_skill_ids: []
- acquired_skill_ids: []
  exploration_id: Exp2
  prerequisite_skill_ids:
  - skill0
objective: ''
schema_version: 5
skills:
  skill0:
    name: Skill1
    question_ids: []
  skill1:
    name: Skill2
    question_ids: []
tags: []
title: A title
""")
    YAML_CONTENT_V6 = ("""category: A category
language_code: en
nodes:
- exploration_id: Exp1
- exploration_id: Exp2
objective: ''
schema_version: 6
tags: []
title: A title
""")

    _LATEST_YAML_CONTENT = YAML_CONTENT_V6

    def _assert_migrates_to_latest(self, yaml_content):
        """Load *yaml_content* and assert it serializes as the latest schema.

        The collection references exploration 'Exp1', which must exist for
        the collection to validate, so it is created first.
        """
        self.save_new_valid_exploration(
            'Exp1', 'user@example.com', end_state_name='End')
        collection = collection_domain.Collection.from_yaml(
            'cid', yaml_content)
        self.assertEqual(collection.to_yaml(), self._LATEST_YAML_CONTENT)

    def test_load_from_v1(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V1)

    def test_load_from_v2(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V2)

    def test_load_from_v3(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V3)

    def test_load_from_v4(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V4)

    def test_load_from_v5(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V5)

    def test_load_from_v6(self):
        self._assert_migrates_to_latest(self.YAML_CONTENT_V6)
class CollectionSummaryTests(test_utils.GenericTestBase):
    """Sanity checks for the CollectionSummary domain object."""

    def test_collection_summary_gets_created(self):
        """to_dict() exposes every constructor field under the right key."""
        summary = collection_domain.CollectionSummary(
            'col_id', 'title', 'category', 'objective', 'en', [], 'status',
            'True', ['owner_id'], ['editor_id'], ['viewer_id'],
            ['contributor_id'], {}, 1, 1, {}, {})
        expected_dict = {
            'id': 'col_id',
            'title': 'title',
            'category': 'category',
            'objective': 'objective',
            'language_code': 'en',
            'tags': [],
            'status': 'status',
            'community_owned': 'True',
            'owner_ids': ['owner_id'],
            'editor_ids': ['editor_id'],
            'viewer_ids': ['viewer_id'],
            'contributor_ids': ['contributor_id'],
            'contributors_summary': {},
            'version': 1,
            'collection_model_created_on': {},
            'collection_model_last_updated': {},
        }
        self.assertEqual(summary.to_dict(), expected_dict)
| true | true |
1c3833377ac441e8cf3d5885d193dc72b8a361ad | 64 | py | Python | bokeh_metaplot/__init__.py | doomsplayer/bokeh-metaplot | e5f74017a6ed6c60b6a37a87a071ea54ab882f9c | [
"MIT"
] | null | null | null | bokeh_metaplot/__init__.py | doomsplayer/bokeh-metaplot | e5f74017a6ed6c60b6a37a87a071ea54ab882f9c | [
"MIT"
] | null | null | null | bokeh_metaplot/__init__.py | doomsplayer/bokeh-metaplot | e5f74017a6ed6c60b6a37a87a071ea54ab882f9c | [
"MIT"
] | null | null | null | from .metaplot import MetaPlot, MetaChart, MetaFigure, metaize
| 21.333333 | 62 | 0.8125 | from .metaplot import MetaPlot, MetaChart, MetaFigure, metaize
| true | true |
1c38333a94e6ed4e43e327d6343bda2f1d029216 | 10,159 | py | Python | prompt_toolkit/input/vt100.py | mark64/python-prompt-toolkit | df768003df167382f3a11a73d800a4555ca1157c | [
"BSD-3-Clause"
] | 2 | 2020-05-29T17:51:10.000Z | 2020-05-29T17:52:26.000Z | prompt_toolkit/input/vt100.py | vxgmichel/python-prompt-toolkit | a49c486fa5ae6ad5a38ff3c1050f9c9609acac1d | [
"BSD-3-Clause"
] | null | null | null | prompt_toolkit/input/vt100.py | vxgmichel/python-prompt-toolkit | a49c486fa5ae6ad5a38ff3c1050f9c9609acac1d | [
"BSD-3-Clause"
] | null | null | null | import contextlib
import io
import os
import sys
import termios
import tty
from asyncio import AbstractEventLoop, get_event_loop
from typing import (
Callable,
ContextManager,
Dict,
Generator,
List,
Optional,
Set,
TextIO,
Tuple,
Union,
)
from prompt_toolkit.utils import is_dumb_terminal
from ..key_binding import KeyPress
from .base import Input
from .posix_utils import PosixStdinReader
from .vt100_parser import Vt100Parser
__all__ = [
"Vt100Input",
"raw_mode",
"cooked_mode",
]
class Vt100Input(Input):
    """
    Vt100 input for Posix systems.
    (This uses a posix file descriptor that can be registered in the event loop.)
    """

    # For the error messages. Only display "Input is not a terminal" once per
    # file descriptor.
    _fds_not_a_terminal: Set[int] = set()

    def __init__(self, stdin: TextIO) -> None:
        # Test whether the given input object has a file descriptor.
        # (Idle reports stdin to be a TTY, but fileno() is not implemented.)
        try:
            # This should not raise, but can return 0.
            stdin.fileno()
        except io.UnsupportedOperation:
            if "idlelib.run" in sys.modules:
                raise io.UnsupportedOperation(
                    "Stdin is not a terminal. Running from Idle is not supported."
                )
            else:
                raise io.UnsupportedOperation("Stdin is not a terminal.")
        # Even when we have a file descriptor, it doesn't mean it's a TTY.
        # Normally, this requires a real TTY device, but people instantiate
        # this class often during unit tests as well. They use for instance
        # pexpect to pipe data into an application. For convenience, we print
        # an error message and go on.
        isatty = stdin.isatty()
        fd = stdin.fileno()
        if not isatty and fd not in Vt100Input._fds_not_a_terminal:
            msg = "Warning: Input is not a terminal (fd=%r).\n"
            sys.stderr.write(msg % fd)
            sys.stderr.flush()
            Vt100Input._fds_not_a_terminal.add(fd)
        self.stdin = stdin
        # Create a backup of the fileno(). We want this to work even if the
        # underlying file is closed, so that `typeahead_hash()` keeps working.
        self._fileno = stdin.fileno()
        self._buffer: List[KeyPress] = []  # Buffer to collect the Key objects.
        self.stdin_reader = PosixStdinReader(self._fileno, encoding=stdin.encoding)
        # The parser pushes completed key presses into self._buffer.
        self.vt100_parser = Vt100Parser(
            lambda key_press: self._buffer.append(key_press)
        )

    @property
    def responds_to_cpr(self) -> bool:
        """Whether the terminal is expected to answer cursor-position requests."""
        # When the input is a tty, we assume that CPR is supported.
        # It's not when the input is piped from Pexpect.
        if os.environ.get("PROMPT_TOOLKIT_NO_CPR", "") == "1":
            return False
        if is_dumb_terminal():
            return False
        try:
            return self.stdin.isatty()
        except ValueError:
            return False  # ValueError: I/O operation on closed file

    def attach(self, input_ready_callback: Callable[[], None]) -> ContextManager[None]:
        """
        Return a context manager that makes this input active in the current
        event loop.
        """
        return _attached_input(self, input_ready_callback)

    def detach(self) -> ContextManager[None]:
        """
        Return a context manager that makes sure that this input is not active
        in the current event loop.
        """
        return _detached_input(self)

    def read_keys(self) -> List[KeyPress]:
        " Read list of KeyPress. "
        # Read text from stdin.
        data = self.stdin_reader.read()
        # Pass it through our vt100 parser.
        self.vt100_parser.feed(data)
        # Return result.
        result = self._buffer
        self._buffer = []
        return result

    def flush_keys(self) -> List[KeyPress]:
        """
        Flush pending keys and return them.
        (Used for flushing the 'escape' key.)
        """
        # Flush all pending keys. (This is most important to flush the vt100
        # 'Escape' key early when nothing else follows.)
        self.vt100_parser.flush()
        # Return result.
        result = self._buffer
        self._buffer = []
        return result

    @property
    def closed(self) -> bool:
        """True when the underlying stdin reader hit end-of-input."""
        return self.stdin_reader.closed

    def raw_mode(self) -> ContextManager[None]:
        """Context manager that puts the terminal in raw mode."""
        return raw_mode(self.stdin.fileno())

    def cooked_mode(self) -> ContextManager[None]:
        """Context manager that restores cooked (canonical) mode."""
        return cooked_mode(self.stdin.fileno())

    def fileno(self) -> int:
        """File descriptor, for registering in an event loop."""
        return self.stdin.fileno()

    def typeahead_hash(self) -> str:
        """Stable key for the typeahead store; works even after stdin closes."""
        return "fd-%s" % (self._fileno,)
# Registry of the input-ready callback currently attached for each
# (event loop, file descriptor) pair, so attach/detach calls can nest and
# restore the previously registered callback.
_current_callbacks: Dict[
    Tuple[AbstractEventLoop, int], Optional[Callable[[], None]]
] = {}  # (loop, fd) -> current callback
@contextlib.contextmanager
def _attached_input(
    input: Vt100Input, callback: Callable[[], None]
) -> Generator[None, None, None]:
    """
    Context manager that makes this input active in the current event loop.

    :param input: :class:`~prompt_toolkit.input.Input` object.
    :param callback: Called when the input is ready to read.
    """
    loop = get_event_loop()
    fd = input.fileno()
    # Remember any callback that was already registered for this fd so it can
    # be restored on exit (attach calls may nest).
    previous = _current_callbacks.get((loop, fd))
    loop.add_reader(fd, callback)
    _current_callbacks[loop, fd] = callback
    try:
        yield
    finally:
        loop.remove_reader(fd)
        if previous:
            # Re-register the callback that we replaced.
            loop.add_reader(fd, previous)
            _current_callbacks[loop, fd] = previous
        else:
            del _current_callbacks[loop, fd]
@contextlib.contextmanager
def _detached_input(input: Vt100Input) -> Generator[None, None, None]:
    """
    Context manager that temporarily deactivates *input* in the current
    event loop: the registered read callback (if any) is removed for the
    duration of the block and restored afterwards.
    """
    event_loop = get_event_loop()
    fd = input.fileno()
    saved_callback = _current_callbacks.get((event_loop, fd))
    if saved_callback:
        event_loop.remove_reader(fd)
        _current_callbacks[event_loop, fd] = None
    try:
        yield
    finally:
        if saved_callback:
            event_loop.add_reader(fd, saved_callback)
            _current_callbacks[event_loop, fd] = saved_callback
class raw_mode:
    """
    ::

        with raw_mode(stdin):
            ''' the pseudo-terminal stdin is now used in raw mode '''

    We ignore errors when executing `tcgetattr` fails.
    """

    # There are several reasons for ignoring errors:
    # 1. To avoid the "Inappropriate ioctl for device" crash if somebody would
    #    execute this code (In a Python REPL, for instance):
    #
    #         import os; f = open(os.devnull); os.dup2(f.fileno(), 0)
    #
    #    The result is that the eventloop will stop correctly, because it has
    #    to logic to quit when stdin is closed. However, we should not fail at
    #    this point. See:
    #      https://github.com/jonathanslenders/python-prompt-toolkit/pull/393
    #      https://github.com/jonathanslenders/python-prompt-toolkit/issues/392
    # 2. Related, when stdin is an SSH pipe, and no full terminal was allocated.
    #    See: https://github.com/jonathanslenders/python-prompt-toolkit/pull/165

    def __init__(self, fileno: int) -> None:
        """Remember the fd and snapshot its current terminal attributes."""
        self.fileno = fileno
        self.attrs_before: Optional[List[Union[int, List[bytes]]]]
        try:
            self.attrs_before = termios.tcgetattr(fileno)
        except termios.error:
            # Ignore attribute errors.
            self.attrs_before = None

    def __enter__(self) -> None:
        """Switch the terminal into raw mode."""
        # NOTE: On os X systems, using pty.setraw() fails. Therefor we are using this:
        try:
            newattr = termios.tcgetattr(self.fileno)
        except termios.error:
            pass
        else:
            newattr[tty.LFLAG] = self._patch_lflag(newattr[tty.LFLAG])
            newattr[tty.IFLAG] = self._patch_iflag(newattr[tty.IFLAG])
            # VMIN defines the number of characters read at a time in
            # non-canonical mode. It seems to default to 1 on Linux, but on
            # Solaris and derived operating systems it defaults to 4. (This is
            # because the VMIN slot is the same as the VEOF slot, which
            # defaults to ASCII EOT = Ctrl-D = 4.)
            newattr[tty.CC][termios.VMIN] = 1  # type: ignore
            termios.tcsetattr(self.fileno, termios.TCSANOW, newattr)
            # Put the terminal in cursor mode. (Instead of application mode.)
            os.write(self.fileno, b"\x1b[?1l")

    @classmethod
    def _patch_lflag(cls, attrs):
        """Clear the local-mode flags that implement echo/canonical input."""
        return attrs & ~(termios.ECHO | termios.ICANON | termios.IEXTEN | termios.ISIG)

    @classmethod
    def _patch_iflag(cls, attrs):
        """Clear the input-mode flags for flow control and CR/NL translation."""
        return attrs & ~(
            # Disable XON/XOFF flow control on output and input.
            # (Don't capture Ctrl-S and Ctrl-Q.)
            # Like executing: "stty -ixon."
            termios.IXON
            | termios.IXOFF
            |
            # Don't translate carriage return into newline on input.
            termios.ICRNL
            | termios.INLCR
            | termios.IGNCR
        )

    def __exit__(self, *a: object) -> None:
        """Restore the attributes saved in ``__init__`` (if any)."""
        if self.attrs_before is not None:
            try:
                termios.tcsetattr(self.fileno, termios.TCSANOW, self.attrs_before)
            except termios.error:
                pass
            # # Put the terminal in application mode.
            # self._stdout.write('\x1b[?1h')
class cooked_mode(raw_mode):
    """
    The opposite of ``raw_mode``, used when we need cooked mode inside a
    `raw_mode` block.  Used in `Application.run_in_terminal`.::

        with cooked_mode(stdin):
            ''' the pseudo-terminal stdin is now used in cooked mode. '''
    """

    @classmethod
    def _patch_lflag(cls, attrs):
        """Re-enable the local-mode flags that raw_mode cleared."""
        return attrs | (termios.ECHO | termios.ICANON | termios.IEXTEN | termios.ISIG)

    @classmethod
    def _patch_iflag(cls, attrs):
        # Turn the ICRNL flag back on. (Without this, calling `input()` in
        # run_in_terminal doesn't work and displays ^M instead. Ptpython
        # evaluates commands using `run_in_terminal`, so it's important that
        # they translate ^M back into ^J.)
        return attrs | termios.ICRNL
| 32.353503 | 87 | 0.617974 | import contextlib
import io
import os
import sys
import termios
import tty
from asyncio import AbstractEventLoop, get_event_loop
from typing import (
Callable,
ContextManager,
Dict,
Generator,
List,
Optional,
Set,
TextIO,
Tuple,
Union,
)
from prompt_toolkit.utils import is_dumb_terminal
from ..key_binding import KeyPress
from .base import Input
from .posix_utils import PosixStdinReader
from .vt100_parser import Vt100Parser
__all__ = [
"Vt100Input",
"raw_mode",
"cooked_mode",
]
class Vt100Input(Input):
    """Vt100 input for Posix systems, backed by a file descriptor that can
    be registered in the event loop.
    """

    # "Input is not a terminal" is warned once per file descriptor.
    _fds_not_a_terminal: Set[int] = set()

    def __init__(self, stdin: TextIO) -> None:
        # Reject inputs without a usable file descriptor (e.g. Idle's stdin).
        try:
            stdin.fileno()
        except io.UnsupportedOperation:
            if "idlelib.run" in sys.modules:
                raise io.UnsupportedOperation(
                    "Stdin is not a terminal. Running from Idle is not supported."
                )
            else:
                raise io.UnsupportedOperation("Stdin is not a terminal.")
        # A non-TTY fd (e.g. a pexpect pipe in tests) is tolerated; warn once.
        isatty = stdin.isatty()
        fd = stdin.fileno()
        if not isatty and fd not in Vt100Input._fds_not_a_terminal:
            msg = "Warning: Input is not a terminal (fd=%r).\n"
            sys.stderr.write(msg % fd)
            sys.stderr.flush()
            Vt100Input._fds_not_a_terminal.add(fd)
        self.stdin = stdin
        # Backup of fileno() so typeahead_hash() works after stdin closes.
        self._fileno = stdin.fileno()
        self._buffer: List[KeyPress] = []  # Completed key presses.
        self.stdin_reader = PosixStdinReader(self._fileno, encoding=stdin.encoding)
        self.vt100_parser = Vt100Parser(
            lambda key_press: self._buffer.append(key_press)
        )

    @property
    def responds_to_cpr(self) -> bool:
        """Whether the terminal is expected to answer cursor-position requests."""
        if os.environ.get("PROMPT_TOOLKIT_NO_CPR", "") == "1":
            return False
        if is_dumb_terminal():
            return False
        try:
            return self.stdin.isatty()
        except ValueError:
            return False  # ValueError: I/O operation on closed file

    def attach(self, input_ready_callback: Callable[[], None]) -> ContextManager[None]:
        """Context manager that makes this input active in the current loop."""
        return _attached_input(self, input_ready_callback)

    def detach(self) -> ContextManager[None]:
        """Context manager that deactivates this input in the current loop."""
        return _detached_input(self)

    def read_keys(self) -> List[KeyPress]:
        """Read and return the list of pending KeyPress objects."""
        # Read text from stdin.
        data = self.stdin_reader.read()
        # Pass it through our vt100 parser.
        self.vt100_parser.feed(data)
        # Return result.
        result = self._buffer
        self._buffer = []
        return result

    def flush_keys(self) -> List[KeyPress]:
        """Flush pending keys (notably a lone 'escape') and return them."""
        # Flush all pending keys. (This is most important to flush the vt100
        # 'Escape' key early when nothing else follows.)
        self.vt100_parser.flush()
        # Return result.
        result = self._buffer
        self._buffer = []
        return result

    @property
    def closed(self) -> bool:
        """True when the underlying stdin reader hit end-of-input."""
        return self.stdin_reader.closed

    def raw_mode(self) -> ContextManager[None]:
        """Context manager that puts the terminal in raw mode."""
        return raw_mode(self.stdin.fileno())

    def cooked_mode(self) -> ContextManager[None]:
        """Context manager that restores cooked (canonical) mode."""
        return cooked_mode(self.stdin.fileno())

    def fileno(self) -> int:
        """File descriptor, for registering in an event loop."""
        return self.stdin.fileno()

    def typeahead_hash(self) -> str:
        """Stable key for the typeahead store; works after stdin closes."""
        return "fd-%s" % (self._fileno,)
# Registry of the input-ready callback currently attached for each
# (event loop, file descriptor) pair; lets attach/detach nest and restore.
_current_callbacks: Dict[
    Tuple[AbstractEventLoop, int], Optional[Callable[[], None]]
] = {}  # (loop, fd) -> current callback
@contextlib.contextmanager
def _attached_input(
    input: Vt100Input, callback: Callable[[], None]
) -> Generator[None, None, None]:
    """Make *input* active in the current event loop; *callback* is invoked
    when the input is ready to read.  Restores any previously attached
    callback on exit.
    """
    loop = get_event_loop()
    fd = input.fileno()
    previous = _current_callbacks.get((loop, fd))
    loop.add_reader(fd, callback)
    _current_callbacks[loop, fd] = callback
    try:
        yield
    finally:
        loop.remove_reader(fd)
        if previous:
            # Re-register the callback that we replaced.
            loop.add_reader(fd, previous)
            _current_callbacks[loop, fd] = previous
        else:
            del _current_callbacks[loop, fd]
@contextlib.contextmanager
def _detached_input(input: Vt100Input) -> Generator[None, None, None]:
    """Temporarily deactivate *input* in the current event loop: remove the
    registered read callback (if any) and restore it afterwards.
    """
    loop = get_event_loop()
    fd = input.fileno()
    previous = _current_callbacks.get((loop, fd))
    if previous:
        loop.remove_reader(fd)
        _current_callbacks[loop, fd] = None
    try:
        yield
    finally:
        if previous:
            loop.add_reader(fd, previous)
            _current_callbacks[loop, fd] = previous
class raw_mode:
    """Context manager that puts the terminal fd into raw mode::

        with raw_mode(stdin):
            ...

    Errors from `tcgetattr` are ignored (see notes below).
    """

    # There are several reasons for ignoring errors:
    # 1. To avoid the "Inappropriate ioctl for device" crash if somebody would
    #    execute this code (In a Python REPL, for instance):
    #
    #         import os; f = open(os.devnull); os.dup2(f.fileno(), 0)
    #
    #    The result is that the eventloop will stop correctly, because it has
    #    to logic to quit when stdin is closed. However, we should not fail at
    #    this point. See:
    #      https://github.com/jonathanslenders/python-prompt-toolkit/pull/393
    #      https://github.com/jonathanslenders/python-prompt-toolkit/issues/392
    # 2. Related, when stdin is an SSH pipe, and no full terminal was allocated.
    #    See: https://github.com/jonathanslenders/python-prompt-toolkit/pull/165

    def __init__(self, fileno: int) -> None:
        # Snapshot the current terminal attributes so __exit__ can restore them.
        self.fileno = fileno
        self.attrs_before: Optional[List[Union[int, List[bytes]]]]
        try:
            self.attrs_before = termios.tcgetattr(fileno)
        except termios.error:
            # Ignore attribute errors.
            self.attrs_before = None

    def __enter__(self) -> None:
        # NOTE: On os X systems, using pty.setraw() fails. Therefor we are using this:
        try:
            newattr = termios.tcgetattr(self.fileno)
        except termios.error:
            pass
        else:
            newattr[tty.LFLAG] = self._patch_lflag(newattr[tty.LFLAG])
            newattr[tty.IFLAG] = self._patch_iflag(newattr[tty.IFLAG])
            # VMIN defines the number of characters read at a time in
            # non-canonical mode. It seems to default to 1 on Linux, but on
            # Solaris and derived operating systems it defaults to 4. (This is
            # because the VMIN slot is the same as the VEOF slot, which
            # defaults to ASCII EOT = Ctrl-D = 4.)
            newattr[tty.CC][termios.VMIN] = 1  # type: ignore
            termios.tcsetattr(self.fileno, termios.TCSANOW, newattr)
            # Put the terminal in cursor mode. (Instead of application mode.)
            os.write(self.fileno, b"\x1b[?1l")

    @classmethod
    def _patch_lflag(cls, attrs):
        """Clear the local-mode flags that implement echo/canonical input."""
        return attrs & ~(termios.ECHO | termios.ICANON | termios.IEXTEN | termios.ISIG)

    @classmethod
    def _patch_iflag(cls, attrs):
        """Clear the input-mode flags for flow control and CR/NL translation."""
        return attrs & ~(
            # Disable XON/XOFF flow control on output and input.
            # (Don't capture Ctrl-S and Ctrl-Q.)
            termios.IXON
            | termios.IXOFF
            |
            # Don't translate carriage return into newline on input.
            termios.ICRNL
            | termios.INLCR
            | termios.IGNCR
        )

    def __exit__(self, *a: object) -> None:
        # Restore the attributes saved in __init__ (if any).
        if self.attrs_before is not None:
            try:
                termios.tcsetattr(self.fileno, termios.TCSANOW, self.attrs_before)
            except termios.error:
                pass
            # # Put the terminal in application mode.
            # self._stdout.write('\x1b[?1h')
class cooked_mode(raw_mode):
    """The opposite of ``raw_mode``: restores cooked (canonical) mode inside
    a `raw_mode` block.  Used in `Application.run_in_terminal`.
    """

    @classmethod
    def _patch_lflag(cls, attrs):
        """Re-enable the local-mode flags that raw_mode cleared."""
        return attrs | (termios.ECHO | termios.ICANON | termios.IEXTEN | termios.ISIG)

    @classmethod
    def _patch_iflag(cls, attrs):
        # Turn the ICRNL flag back on. (Without this, calling `input()` in
        # run_in_terminal doesn't work and displays ^M instead. Ptpython
        # they translate ^M back into ^J.)
        return attrs | termios.ICRNL
| true | true |
1c3833a131a1ebfe57e0a4626e989e801f84a185 | 480 | py | Python | pdf_first_page_to_jpg.py | Pakequis/Python_scripts | b0de2e2647ecaed331e7f8df2422b548df4b348c | [
"MIT"
] | null | null | null | pdf_first_page_to_jpg.py | Pakequis/Python_scripts | b0de2e2647ecaed331e7f8df2422b548df4b348c | [
"MIT"
] | null | null | null | pdf_first_page_to_jpg.py | Pakequis/Python_scripts | b0de2e2647ecaed331e7f8df2422b548df4b348c | [
"MIT"
] | null | null | null | # Library: pdf2image and poppler-0.68.0
import os

# Requires the third-party `pdf2image` package plus the Poppler binaries
# (tested with poppler-0.68.0) available on PATH.
from pdf2image import convert_from_path

pdf_dir = "PATH"  # Path to the folder containing the PDF files.
os.chdir(pdf_dir)

print("PDF to image start...")
for pdf_file in os.listdir(pdf_dir):
    if pdf_file.endswith(".pdf"):
        # Render only page 1.  The 150 is the rendering resolution in DPI
        # (the old comment calling it "image quality" was wrong).
        pages = convert_from_path(
            pdf_file, dpi=150, first_page=1, last_page=1)
        # splitext is safer than slicing off the last 4 characters.
        base_name = os.path.splitext(pdf_file)[0]
        pages[0].save("%s.jpg" % (base_name), "JPEG")
        print(base_name)
print("End...")
import os
from pdf2image import convert_from_path

pdf_dir = "PATH"
os.chdir(pdf_dir)
print("PDF to image start...")
for entry in os.listdir(pdf_dir):
    if not entry.endswith(".pdf"):
        continue
    # Convert only the first page at 150 DPI.
    pages = convert_from_path(entry, 150, None, 1, 1)
    stem = entry[:-4]
    pages[0].save("%s.jpg" % (stem), "JPEG")
    print(stem)
print("End...")
1c38359cb800082b537261c0842402b9fd7bf36f | 84 | py | Python | backend/fms_core/template_importer/row_handlers/container_rename/__init__.py | c3g/freezeman | bc4b6c8a2876e888ce41b7d14127cc22bc2b2143 | [
"W3C"
] | 2 | 2021-07-31T13:20:08.000Z | 2021-09-28T13:18:55.000Z | backend/fms_core/template_importer/row_handlers/container_creation/__init__.py | c3g/freezeman | bc4b6c8a2876e888ce41b7d14127cc22bc2b2143 | [
"W3C"
] | 71 | 2021-03-12T22:08:19.000Z | 2022-03-25T15:24:40.000Z | backend/fms_core/template_importer/row_handlers/container_rename/__init__.py | c3g/freezeman | bc4b6c8a2876e888ce41b7d14127cc22bc2b2143 | [
"W3C"
] | null | null | null | from .container import ContainerRowHandler
__all__ = [
"ContainerRowHandler",
] | 16.8 | 42 | 0.761905 | from .container import ContainerRowHandler
__all__ = [
"ContainerRowHandler",
] | true | true |
1c3836841e8cbea4eab578bd595d3c962670d427 | 4,334 | py | Python | senlin-7.0.0/senlin/objects/profile.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-18T02:56:50.000Z | 2022-03-01T15:28:02.000Z | senlin-7.0.0/senlin/objects/profile.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | senlin-7.0.0/senlin/objects/profile.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 45 | 2015-10-19T02:35:57.000Z | 2021-09-28T09:01:42.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Profile object."""
from oslo_utils import uuidutils
from senlin.common import exception
from senlin.common import utils
from senlin.db import api as db_api
from senlin.objects import base
from senlin.objects import fields
@base.SenlinObjectRegistry.register
class Profile(base.SenlinObject, base.VersionedObjectDictCompat):
    """Senlin profile object."""

    fields = {
        'id': fields.UUIDField(),
        'name': fields.StringField(),
        'type': fields.StringField(),
        'context': fields.JsonField(),
        'spec': fields.JsonField(),
        'created_at': fields.DateTimeField(),
        'updated_at': fields.DateTimeField(nullable=True),
        'user': fields.StringField(),
        'project': fields.StringField(),
        'domain': fields.StringField(nullable=True),
        'permission': fields.StringField(nullable=True),
        'metadata': fields.JsonField(nullable=True),
    }

    @classmethod
    def create(cls, context, values):
        """Create a profile DB record from *values* and wrap it."""
        values = cls._transpose_metadata(values)
        obj = db_api.profile_create(context, values)
        return cls._from_db_object(context, cls(context), obj)

    @classmethod
    def find(cls, context, identity, **kwargs):
        """Find a profile with the given identity.

        :param context: An instance of the request context.
        :param identity: The UUID, name or short-id of a profile.
        :param project_safe: A boolean indicating whether profile from
                             projects other than the requesting one can be
                             returned.
        :return: A DB object of profile or an exception `ResourceNotFound`
                 if no matching object is found.
        """
        # Try UUID first, then name, then short-id (a UUID-like name is
        # still resolvable via the name lookup fallback).
        if uuidutils.is_uuid_like(identity):
            profile = cls.get(context, identity, **kwargs)
            if not profile:
                profile = cls.get_by_name(context, identity, **kwargs)
        else:
            profile = cls.get_by_name(context, identity, **kwargs)
            if not profile:
                profile = cls.get_by_short_id(context, identity, **kwargs)
        if not profile:
            raise exception.ResourceNotFound(type='profile', id=identity)
        return profile

    @classmethod
    def get(cls, context, profile_id, **kwargs):
        """Retrieve a profile by UUID."""
        obj = db_api.profile_get(context, profile_id, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_by_name(cls, context, name, **kwargs):
        """Retrieve a profile by name."""
        obj = db_api.profile_get_by_name(context, name, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_by_short_id(cls, context, short_id, **kwargs):
        """Retrieve a profile by short ID."""
        obj = db_api.profile_get_by_short_id(context, short_id, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_all(cls, context, **kwargs):
        """Retrieve all profiles visible in the given context."""
        objs = db_api.profile_get_all(context, **kwargs)
        return [cls._from_db_object(context, cls(), obj) for obj in objs]

    @classmethod
    def update(cls, context, obj_id, values):
        """Update the profile identified by *obj_id* with *values*."""
        values = cls._transpose_metadata(values)
        obj = db_api.profile_update(context, obj_id, values)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def delete(cls, context, obj_id):
        """Delete the profile identified by *obj_id*."""
        db_api.profile_delete(context, obj_id)

    def to_dict(self):
        """Return a JSON-serializable dict representation of the profile."""
        profile_dict = {
            'id': self.id,
            'name': self.name,
            'type': self.type,
            'user': self.user,
            'project': self.project,
            'domain': self.domain,
            'spec': self.spec,
            'metadata': self.metadata,
            'created_at': utils.isotime(self.created_at),
            'updated_at': utils.isotime(self.updated_at)
        }
        return profile_dict
| 36.728814 | 75 | 0.643747 |
from oslo_utils import uuidutils
from senlin.common import exception
from senlin.common import utils
from senlin.db import api as db_api
from senlin.objects import base
from senlin.objects import fields
@base.SenlinObjectRegistry.register
class Profile(base.SenlinObject, base.VersionedObjectDictCompat):
    """Senlin profile object."""

    fields = {
        'id': fields.UUIDField(),
        'name': fields.StringField(),
        'type': fields.StringField(),
        'context': fields.JsonField(),
        'spec': fields.JsonField(),
        'created_at': fields.DateTimeField(),
        'updated_at': fields.DateTimeField(nullable=True),
        'user': fields.StringField(),
        'project': fields.StringField(),
        'domain': fields.StringField(nullable=True),
        'permission': fields.StringField(nullable=True),
        'metadata': fields.JsonField(nullable=True),
    }

    @classmethod
    def create(cls, context, values):
        """Create a profile DB record from *values* and wrap it."""
        values = cls._transpose_metadata(values)
        obj = db_api.profile_create(context, values)
        return cls._from_db_object(context, cls(context), obj)

    @classmethod
    def find(cls, context, identity, **kwargs):
        """Find a profile by UUID, name or short-id.

        Raises `ResourceNotFound` when no matching profile exists.
        """
        # Try UUID first, then name, then short-id.
        if uuidutils.is_uuid_like(identity):
            profile = cls.get(context, identity, **kwargs)
            if not profile:
                profile = cls.get_by_name(context, identity, **kwargs)
        else:
            profile = cls.get_by_name(context, identity, **kwargs)
            if not profile:
                profile = cls.get_by_short_id(context, identity, **kwargs)
        if not profile:
            raise exception.ResourceNotFound(type='profile', id=identity)
        return profile

    @classmethod
    def get(cls, context, profile_id, **kwargs):
        """Retrieve a profile by UUID."""
        obj = db_api.profile_get(context, profile_id, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_by_name(cls, context, name, **kwargs):
        """Retrieve a profile by name."""
        obj = db_api.profile_get_by_name(context, name, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_by_short_id(cls, context, short_id, **kwargs):
        """Retrieve a profile by short ID."""
        obj = db_api.profile_get_by_short_id(context, short_id, **kwargs)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def get_all(cls, context, **kwargs):
        """Retrieve all profiles visible in the given context."""
        objs = db_api.profile_get_all(context, **kwargs)
        return [cls._from_db_object(context, cls(), obj) for obj in objs]

    @classmethod
    def update(cls, context, obj_id, values):
        """Update the profile identified by *obj_id* with *values*."""
        values = cls._transpose_metadata(values)
        obj = db_api.profile_update(context, obj_id, values)
        return cls._from_db_object(context, cls(), obj)

    @classmethod
    def delete(cls, context, obj_id):
        """Delete the profile identified by *obj_id*."""
        db_api.profile_delete(context, obj_id)

    def to_dict(self):
        """Return a JSON-serializable dict representation of the profile."""
        profile_dict = {
            'id': self.id,
            'name': self.name,
            'type': self.type,
            'user': self.user,
            'project': self.project,
            'domain': self.domain,
            'spec': self.spec,
            'metadata': self.metadata,
            'created_at': utils.isotime(self.created_at),
            'updated_at': utils.isotime(self.updated_at)
        }
        return profile_dict
| true | true |
1c38375926e5e1f1169c96ab24e03ac84e1f9163 | 1,355 | py | Python | python/week9.py | Coding-Dojo-ACC/group-a-project | 5c5dbf5eb124f78a239de1f80f8b2178e6734e6a | [
"MIT"
] | null | null | null | python/week9.py | Coding-Dojo-ACC/group-a-project | 5c5dbf5eb124f78a239de1f80f8b2178e6734e6a | [
"MIT"
] | null | null | null | python/week9.py | Coding-Dojo-ACC/group-a-project | 5c5dbf5eb124f78a239de1f80f8b2178e6734e6a | [
"MIT"
] | 3 | 2021-02-13T01:20:23.000Z | 2021-02-26T22:29:13.000Z | # Count by 5's
# for x in range(5,1000,1) # start at 5 go till 1000 and count every number
# for x in range(5, 101, 5): # start at 5 go till 1000 and add 5 each iteration
# print(x)
# for y in range(5, 101, 1):
# if y % 5 == 0:
# print(y)
def flexCount(lowNum, hiNum, mult):
    """Print every multiple of *mult* in the inclusive range [lowNum, hiNum]."""
    for candidate in range(lowNum, hiNum + 1):
        if candidate % mult == 0:
            print(candidate)
# flexCount(2,9,3)
# l = 2
# h = 10
# m = 3
# for i in range(2,10):
# if i % m == 0:
# print(i)
def add(a, b):
    """Print the sum of *a* and *b*, then return it."""
    total = a + b
    print(total)
    return total
# add(2,4)
# Lesson snippets: each redefinition of `a` below replaces the previous one.
def a():
    return 5
# print(a())
def a():
    return 5
# print(a()+a())
def a(b,c):
    # Prints the sum but returns None, so `a(1,2) + a(2,3)` would raise a
    # TypeError even though both sums get printed.
    print(b + c)
    # return b + c # adding in this line would allow it to actually concatenate the print
# print(a(1,2) + a(2,3))
# Return statement means STOP no more instructions will follow me
def a():
    return 5
    print(10)  # unreachable: execution stops at the return above
# print(a())
# if we change the above to the following it will print 10 and 5
def a():
    print(10)
    return 5
# print(a())
def dojoWay():
    """FizzBuzz-style drill: print 1..100, replacing multiples of 10 with
    'Coding Dojo' and remaining multiples of 5 with 'Coding'."""
    for number in range(1, 101):
        if number % 10 == 0:
            message = "Coding Dojo"
        elif number % 5 == 0:
            message = "Coding"
        else:
            message = number
        print(message)
dojoWay() | 17.828947 | 90 | 0.551292 |
# for x in range(5,1000,1) # start at 5 go till 1000 and count every number
# for x in range(5, 101, 5): # start at 5 go till 1000 and add 5 each iteration
# print(x)
# for y in range(5, 101, 1):
# if y % 5 == 0:
# print(y)
def flexCount(lowNum, hiNum, mult):
    # Print every multiple of `mult` in the inclusive range [lowNum, hiNum].
    for i in range(lowNum, hiNum +1):
        if i % mult == 0:
            print(i)
# flexCount(2,9,3)
# l = 2
# h = 10
# m = 3
# for i in range(2,10):
# if i % m == 0:
# print(i)
def add(a,b):
    # Print the sum of `a` and `b`, then return it.
    x = a + b
    print(x)
    return x
# add(2,4)
# Lesson snippets: each redefinition of `a` below replaces the previous one.
def a():
    return 5
# print(a())
def a():
    return 5
# print(a()+a())
def a(b,c):
    # Prints the sum but returns None, so `a(1,2) + a(2,3)` would raise a
    # TypeError even though both sums get printed.
    print(b + c)
# return b + c # adding in this line would allow it to actually concatenate the print
# print(a(1,2) + a(2,3))
# Return statement means STOP no more instructions will follow me
def a():
    return 5
    print(10)  # unreachable: execution stops at the return above
# print(a())
# if we change the above to the following it will print 10 and 5
def a():
    print(10)
    return 5
# print(a())
def dojoWay():
for i in range(1, 101):
if i % 10 == 0: # if true follow my instructions if false move on
print("Coding Dojo")
elif i % 5 == 0: # if true follow my instructions if false move on
print("Coding")
else: # if all else fails just follow me
print(i)
dojoWay() | true | true |
1c3838f06848ee092c71412bf25ae399d92d7be1 | 7,375 | py | Python | napari/_vispy/vispy_image_layer.py | glyg/napari | be196dc979aed663a484f3a424dacc3227f7c958 | [
"BSD-3-Clause"
] | null | null | null | napari/_vispy/vispy_image_layer.py | glyg/napari | be196dc979aed663a484f3a424dacc3227f7c958 | [
"BSD-3-Clause"
] | null | null | null | napari/_vispy/vispy_image_layer.py | glyg/napari | be196dc979aed663a484f3a424dacc3227f7c958 | [
"BSD-3-Clause"
] | null | null | null | from vispy.scene.visuals import Image as ImageNode
from vispy.scene.visuals import Volume as VolumeNode
from vispy.color import Colormap
import numpy as np
from .vispy_base_layer import VispyBaseLayer
texture_dtypes = [
np.dtype(np.int8),
np.dtype(np.uint8),
np.dtype(np.int16),
np.dtype(np.uint16),
np.dtype(np.float32),
]
class VispyImageLayer(VispyBaseLayer):
def __init__(self, layer):
node = ImageNode(None, method='auto')
super().__init__(layer, node)
self.layer.events.rendering.connect(
lambda e: self._on_rendering_change()
)
self.layer.events.interpolation.connect(
lambda e: self._on_interpolation_change()
)
self.layer.events.colormap.connect(
lambda e: self._on_colormap_change()
)
self.layer.events.contrast_limits.connect(
lambda e: self._on_contrast_limits_change()
)
self.layer.events.gamma.connect(lambda e: self._on_gamma_change())
self._on_display_change()
self._on_data_change()
def _on_display_change(self):
parent = self.node.parent
self.node.parent = None
if self.layer.dims.ndisplay == 2:
self.node = ImageNode(None, method='auto')
else:
self.node = VolumeNode(np.zeros((1, 1, 1)))
self.node.parent = parent
self.reset()
def _on_data_change(self):
# Check if ndisplay has changed current node type needs updating
if (
self.layer.dims.ndisplay == 3
and not isinstance(self.node, VolumeNode)
) or (
self.layer.dims.ndisplay == 2
and not isinstance(self.node, ImageNode)
):
self._on_display_change()
data = self.layer._data_view
dtype = np.dtype(data.dtype)
if dtype not in texture_dtypes:
try:
dtype = dict(
i=np.int16, f=np.float32, u=np.uint16, b=np.uint8
)[dtype.kind]
except KeyError: # not an int or float
raise TypeError(
f'type {dtype} not allowed for texture; must be one of {set(texture_dtypes)}' # noqa: E501
)
data = data.astype(dtype)
if self.layer.dims.ndisplay == 3 and self.layer.dims.ndim == 2:
data = np.expand_dims(data, axis=0)
if self.layer.dims.ndisplay == 2:
self.node._need_colortransform_update = True
self.node.set_data(data)
else:
self.node.set_data(data, clim=self.layer.contrast_limits)
self.node.update()
def _on_interpolation_change(self):
if self.layer.dims.ndisplay == 2:
self.node.interpolation = self.layer.interpolation
def _on_rendering_change(self):
if not self.layer.dims.ndisplay == 2:
self.node.method = self.layer.rendering
def _on_colormap_change(self):
cmap = self.layer.colormap[1]
if self.layer.gamma != 1:
# when gamma!=1, we instantiate a new colormap
# with 256 control points from 0-1
cmap = Colormap(cmap[np.linspace(0, 1, 256) ** self.layer.gamma])
# Below is fixed in #1712
if not self.layer.dims.ndisplay == 2:
self.node.view_program['texture2D_LUT'] = (
cmap.texture_lut() if (hasattr(cmap, 'texture_lut')) else None
)
self.node.cmap = cmap
def _on_contrast_limits_change(self):
if self.layer.dims.ndisplay == 2:
self.node.clim = self.layer.contrast_limits
else:
self._on_data_change()
def _on_gamma_change(self):
self._on_colormap_change()
def _on_scale_change(self):
self.scale = [
self.layer.scale[d] * self.layer._scale_view[d]
for d in self.layer.dims.displayed[::-1]
]
if self.layer.is_pyramid:
self.layer.top_left = self.find_top_left()
self.layer.position = self._transform_position(self._position)
def _on_translate_change(self):
self.translate = [
self.layer.translate[d]
+ self.layer._translate_view[d]
+ self.layer.translate_grid[d]
for d in self.layer.dims.displayed[::-1]
]
self.layer.position = self._transform_position(self._position)
def compute_data_level(self, size):
"""Computed what level of the pyramid should be viewed given the
current size of the requested field of view.
Parameters
----------
size : 2-tuple
Requested size of field of view in image coordinates
Returns
----------
level : int
Level of the pyramid to be viewing.
"""
# Convert requested field of view from the camera into log units
size = np.log2(np.max(size))
# Max allowed tile in log units
max_size = np.log2(self.layer._max_tile_shape)
# Allow for more than 2x coverage of field of view with max tile
diff = size - max_size + 1.25
# Find closed downsample level to diff
ds = self.layer.level_downsamples[:, self.layer.dims.displayed].max(
axis=1
)
level = np.argmin(abs(np.log2(ds) - diff))
return level
def find_top_left(self):
"""Finds the top left pixel of the canvas. Depends on the current
pan and zoom position
Returns
----------
top_left : tuple of int
Coordinates of top left pixel.
"""
nd = self.layer.dims.ndisplay
# Find image coordinate of top left canvas pixel
if self.node.canvas is not None:
transform = self.node.canvas.scene.node_transform(self.node)
pos = (
transform.map([0, 0])[:nd]
+ self.translate[:nd] / self.scale[:nd]
)
else:
pos = [0] * nd
top_left = np.zeros(self.layer.ndim, dtype=int)
for i, d in enumerate(self.layer.dims.displayed[::-1]):
top_left[d] = pos[i]
# Clip according to the max image shape
top_left = np.clip(
top_left, 0, np.subtract(self.layer.level_shapes[0], 1)
)
# Convert to offset for image array
rounding_factor = self.layer._max_tile_shape / 4
top_left = rounding_factor * np.floor(top_left / rounding_factor)
return top_left.astype(int)
def on_draw(self, event):
"""Called whenever the canvas is drawn, which happens whenever new
data is sent to the canvas or the camera is moved.
"""
self.layer.scale_factor = self.scale_factor
if self.layer.is_pyramid:
self.layer.scale_factor = self.scale_factor
size = self.camera.rect.size
data_level = self.compute_data_level(size)
if data_level != self.layer.data_level:
self.layer.data_level = data_level
else:
self.layer.top_left = self.find_top_left()
def reset(self):
self._reset_base()
self._on_interpolation_change()
self._on_rendering_change()
self._on_colormap_change()
if self.layer.dims.ndisplay == 2:
self._on_contrast_limits_change()
| 33.371041 | 111 | 0.591051 | from vispy.scene.visuals import Image as ImageNode
from vispy.scene.visuals import Volume as VolumeNode
from vispy.color import Colormap
import numpy as np
from .vispy_base_layer import VispyBaseLayer
texture_dtypes = [
np.dtype(np.int8),
np.dtype(np.uint8),
np.dtype(np.int16),
np.dtype(np.uint16),
np.dtype(np.float32),
]
class VispyImageLayer(VispyBaseLayer):
def __init__(self, layer):
node = ImageNode(None, method='auto')
super().__init__(layer, node)
self.layer.events.rendering.connect(
lambda e: self._on_rendering_change()
)
self.layer.events.interpolation.connect(
lambda e: self._on_interpolation_change()
)
self.layer.events.colormap.connect(
lambda e: self._on_colormap_change()
)
self.layer.events.contrast_limits.connect(
lambda e: self._on_contrast_limits_change()
)
self.layer.events.gamma.connect(lambda e: self._on_gamma_change())
self._on_display_change()
self._on_data_change()
def _on_display_change(self):
parent = self.node.parent
self.node.parent = None
if self.layer.dims.ndisplay == 2:
self.node = ImageNode(None, method='auto')
else:
self.node = VolumeNode(np.zeros((1, 1, 1)))
self.node.parent = parent
self.reset()
def _on_data_change(self):
if (
self.layer.dims.ndisplay == 3
and not isinstance(self.node, VolumeNode)
) or (
self.layer.dims.ndisplay == 2
and not isinstance(self.node, ImageNode)
):
self._on_display_change()
data = self.layer._data_view
dtype = np.dtype(data.dtype)
if dtype not in texture_dtypes:
try:
dtype = dict(
i=np.int16, f=np.float32, u=np.uint16, b=np.uint8
)[dtype.kind]
except KeyError:
raise TypeError(
f'type {dtype} not allowed for texture; must be one of {set(texture_dtypes)}'
)
data = data.astype(dtype)
if self.layer.dims.ndisplay == 3 and self.layer.dims.ndim == 2:
data = np.expand_dims(data, axis=0)
if self.layer.dims.ndisplay == 2:
self.node._need_colortransform_update = True
self.node.set_data(data)
else:
self.node.set_data(data, clim=self.layer.contrast_limits)
self.node.update()
def _on_interpolation_change(self):
if self.layer.dims.ndisplay == 2:
self.node.interpolation = self.layer.interpolation
def _on_rendering_change(self):
if not self.layer.dims.ndisplay == 2:
self.node.method = self.layer.rendering
def _on_colormap_change(self):
cmap = self.layer.colormap[1]
if self.layer.gamma != 1:
cmap = Colormap(cmap[np.linspace(0, 1, 256) ** self.layer.gamma])
if not self.layer.dims.ndisplay == 2:
self.node.view_program['texture2D_LUT'] = (
cmap.texture_lut() if (hasattr(cmap, 'texture_lut')) else None
)
self.node.cmap = cmap
def _on_contrast_limits_change(self):
if self.layer.dims.ndisplay == 2:
self.node.clim = self.layer.contrast_limits
else:
self._on_data_change()
def _on_gamma_change(self):
self._on_colormap_change()
def _on_scale_change(self):
self.scale = [
self.layer.scale[d] * self.layer._scale_view[d]
for d in self.layer.dims.displayed[::-1]
]
if self.layer.is_pyramid:
self.layer.top_left = self.find_top_left()
self.layer.position = self._transform_position(self._position)
def _on_translate_change(self):
self.translate = [
self.layer.translate[d]
+ self.layer._translate_view[d]
+ self.layer.translate_grid[d]
for d in self.layer.dims.displayed[::-1]
]
self.layer.position = self._transform_position(self._position)
def compute_data_level(self, size):
size = np.log2(np.max(size))
max_size = np.log2(self.layer._max_tile_shape)
diff = size - max_size + 1.25
ds = self.layer.level_downsamples[:, self.layer.dims.displayed].max(
axis=1
)
level = np.argmin(abs(np.log2(ds) - diff))
return level
def find_top_left(self):
nd = self.layer.dims.ndisplay
if self.node.canvas is not None:
transform = self.node.canvas.scene.node_transform(self.node)
pos = (
transform.map([0, 0])[:nd]
+ self.translate[:nd] / self.scale[:nd]
)
else:
pos = [0] * nd
top_left = np.zeros(self.layer.ndim, dtype=int)
for i, d in enumerate(self.layer.dims.displayed[::-1]):
top_left[d] = pos[i]
top_left = np.clip(
top_left, 0, np.subtract(self.layer.level_shapes[0], 1)
)
rounding_factor = self.layer._max_tile_shape / 4
top_left = rounding_factor * np.floor(top_left / rounding_factor)
return top_left.astype(int)
def on_draw(self, event):
self.layer.scale_factor = self.scale_factor
if self.layer.is_pyramid:
self.layer.scale_factor = self.scale_factor
size = self.camera.rect.size
data_level = self.compute_data_level(size)
if data_level != self.layer.data_level:
self.layer.data_level = data_level
else:
self.layer.top_left = self.find_top_left()
def reset(self):
self._reset_base()
self._on_interpolation_change()
self._on_rendering_change()
self._on_colormap_change()
if self.layer.dims.ndisplay == 2:
self._on_contrast_limits_change()
| true | true |
1c3839b056d2f071491f44d4a6e908496eab21cc | 979 | py | Python | setup.py | spell00/pytorch-fmri | 42abd1e5a9a54cd4f4b7e0e89579301203d0aeb0 | [
"MIT"
] | null | null | null | setup.py | spell00/pytorch-fmri | 42abd1e5a9a54cd4f4b7e0e89579301203d0aeb0 | [
"MIT"
] | null | null | null | setup.py | spell00/pytorch-fmri | 42abd1e5a9a54cd4f4b7e0e89579301203d0aeb0 | [
"MIT"
] | null | null | null | import os
import sys
from distutils.sysconfig import get_python_lib
from distutils.core import setup
from setuptools import find_packages
CURRENT_PYTHON = sys.version_info[:2]
setup(name="fmri", version="0.2",
description="A package for learning to classify raw audio according to a user's self-definied "
"scores of appreciation",
url="https://github.com/pytorch/audio",
packages=find_packages(),
author="Simon J Pelletier",
author_email="simonjpelletier@gmail.com",
install_requires=['torch',
'matplotlib',
'numpy',
'scipy',
'Unidecode',
'nibabel',
'nilearn',
'tqdm',
'torchvision',
'ax-platform',
'tensorboardX'
# 'apex'
]
)
| 31.580645 | 101 | 0.490296 | import os
import sys
from distutils.sysconfig import get_python_lib
from distutils.core import setup
from setuptools import find_packages
CURRENT_PYTHON = sys.version_info[:2]
setup(name="fmri", version="0.2",
description="A package for learning to classify raw audio according to a user's self-definied "
"scores of appreciation",
url="https://github.com/pytorch/audio",
packages=find_packages(),
author="Simon J Pelletier",
author_email="simonjpelletier@gmail.com",
install_requires=['torch',
'matplotlib',
'numpy',
'scipy',
'Unidecode',
'nibabel',
'nilearn',
'tqdm',
'torchvision',
'ax-platform',
'tensorboardX'
# 'apex'
]
)
| true | true |
1c383a6063e6bece097dd6a9255159222e9b8380 | 70,322 | py | Python | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer_test.py | jaymessina3/model-analysis | 8638ad375d860a97df5938850c59c72b0def995a | [
"Apache-2.0"
] | 1,118 | 2018-03-30T02:01:17.000Z | 2022-03-22T07:25:58.000Z | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer_test.py | jaymessina3/model-analysis | 8638ad375d860a97df5938850c59c72b0def995a | [
"Apache-2.0"
] | 82 | 2018-04-12T12:54:05.000Z | 2022-03-31T11:14:31.000Z | tensorflow_model_analysis/writers/metrics_plots_and_validations_writer_test.py | jaymessina3/model-analysis | 8638ad375d860a97df5938850c59c72b0def995a | [
"Apache-2.0"
] | 280 | 2018-03-30T15:58:02.000Z | 2022-03-21T07:41:53.000Z | # Lint as: python3
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test for using the MetricsPlotsAndValidationsWriter API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import string
import tempfile
from absl.testing import parameterized
import apache_beam as beam
from apache_beam.testing import util
import numpy as np
import tensorflow as tf
from tensorflow_model_analysis import constants
from tensorflow_model_analysis import types
from tensorflow_model_analysis.api import model_eval_lib
from tensorflow_model_analysis.eval_saved_model import testutil
from tensorflow_model_analysis.eval_saved_model.example_trainers import fixed_prediction_estimator
from tensorflow_model_analysis.evaluators import legacy_metrics_and_plots_evaluator
from tensorflow_model_analysis.evaluators import metrics_plots_and_validations_evaluator
from tensorflow_model_analysis.extractors import example_weights_extractor
from tensorflow_model_analysis.extractors import features_extractor
from tensorflow_model_analysis.extractors import labels_extractor
from tensorflow_model_analysis.extractors import legacy_predict_extractor
from tensorflow_model_analysis.extractors import predictions_extractor
from tensorflow_model_analysis.extractors import slice_key_extractor
from tensorflow_model_analysis.extractors import unbatch_extractor
from tensorflow_model_analysis.metrics import attributions
from tensorflow_model_analysis.metrics import binary_confusion_matrices
from tensorflow_model_analysis.metrics import metric_types
from tensorflow_model_analysis.post_export_metrics import metric_keys
from tensorflow_model_analysis.post_export_metrics import post_export_metrics
from tensorflow_model_analysis.proto import config_pb2
from tensorflow_model_analysis.proto import metrics_for_slice_pb2
from tensorflow_model_analysis.proto import validation_result_pb2
from tensorflow_model_analysis.slicer import slicer_lib as slicer
from tensorflow_model_analysis.writers import metrics_plots_and_validations_writer
from tfx_bsl.tfxio import raw_tf_record
from tfx_bsl.tfxio import tensor_adapter
from tfx_bsl.tfxio import test_util
from google.protobuf import text_format
from tensorflow_metadata.proto.v0 import schema_pb2
def _make_slice_key(*args):
if len(args) % 2 != 0:
raise ValueError('number of arguments should be even')
result = []
for i in range(0, len(args), 2):
result.append((args[i], args[i + 1]))
result = tuple(result)
return result
class MetricsPlotsAndValidationsWriterTest(testutil.TensorflowModelAnalysisTest,
parameterized.TestCase):
def setUp(self):
super(MetricsPlotsAndValidationsWriterTest, self).setUp()
self.longMessage = True # pylint: disable=invalid-name
def _getTempDir(self):
return tempfile.mkdtemp()
def _getExportDir(self):
return os.path.join(self._getTempDir(), 'export_dir')
def _getBaselineDir(self):
return os.path.join(self._getTempDir(), 'baseline_export_dir')
def _build_keras_model(self, model_dir, mul):
input_layer = tf.keras.layers.Input(shape=(1,), name='input_1')
output_layer = tf.keras.layers.Lambda(
lambda x, mul: x * mul, output_shape=(1,), arguments={'mul': mul})(
input_layer)
model = tf.keras.models.Model([input_layer], output_layer)
model.compile(
optimizer=tf.keras.optimizers.Adam(lr=.001),
loss=tf.keras.losses.BinaryCrossentropy(),
metrics=['accuracy'])
model.fit(x=[[0], [1]], y=[[0], [1]], steps_per_epoch=1)
model.save(model_dir, save_format='tf')
return self.createTestEvalSharedModel(
eval_saved_model_path=model_dir, tags=[tf.saved_model.SERVING])
  def testConvertSlicePlotsToProto(self):
    """A (slice_key, {PlotKey: plot}) pair converts to a PlotsForSlice proto.

    The PlotKey's output_name should appear under plot_keys_and_values and the
    calibration histogram should be carried over unchanged.
    """
    slice_key = _make_slice_key('fruit', 'apple')
    plot_key = metric_types.PlotKey(
        name='calibration_plot', output_name='output_name')
    calibration_plot = text_format.Parse(
        """
        buckets {
          lower_threshold_inclusive: -inf
          upper_threshold_exclusive: 0.0
          num_weighted_examples { value: 0.0 }
          total_weighted_label { value: 0.0 }
          total_weighted_refined_prediction { value: 0.0 }
        }
        buckets {
          lower_threshold_inclusive: 0.0
          upper_threshold_exclusive: 0.5
          num_weighted_examples { value: 1.0 }
          total_weighted_label { value: 1.0 }
          total_weighted_refined_prediction { value: 0.3 }
        }
        buckets {
          lower_threshold_inclusive: 0.5
          upper_threshold_exclusive: 1.0
          num_weighted_examples { value: 1.0 }
          total_weighted_label { value: 0.0 }
          total_weighted_refined_prediction { value: 0.7 }
        }
        buckets {
          lower_threshold_inclusive: 1.0
          upper_threshold_exclusive: inf
          num_weighted_examples { value: 0.0 }
          total_weighted_label { value: 0.0 }
          total_weighted_refined_prediction { value: 0.0 }
        }
     """, metrics_for_slice_pb2.CalibrationHistogramBuckets())
    expected_plots_for_slice = text_format.Parse(
        """
      slice_key {
        single_slice_keys {
          column: 'fruit'
          bytes_value: 'apple'
        }
      }
      plot_keys_and_values {
        key {
          output_name: "output_name"
        }
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              upper_threshold_exclusive: 0.0
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
            buckets {
              lower_threshold_inclusive: 0.0
              upper_threshold_exclusive: 0.5
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 1.0 }
              total_weighted_refined_prediction { value: 0.3 }
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.7 }
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
          }
        }
      }
    """, metrics_for_slice_pb2.PlotsForSlice())
    # None => no legacy add_metrics_callbacks; uses the MetricKey-based path.
    got = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
        (slice_key, {
            plot_key: calibration_plot
        }), None)
    self.assertProtoEquals(expected_plots_for_slice, got)
  def testConvertSlicePlotsToProtoLegacyStringKeys(self):
    """Legacy string-keyed plot matrices serialize via post-export callbacks.

    The calibration plot callback translates the raw matrices/boundaries
    arrays into calibration_histogram_buckets under the legacy 'plots' map.
    """
    slice_key = _make_slice_key('fruit', 'apple')
    tfma_plots = {
        metric_keys.CALIBRATION_PLOT_MATRICES:
            np.array([
                [0.0, 0.0, 0.0],
                [0.3, 1.0, 1.0],
                [0.7, 0.0, 1.0],
                [0.0, 0.0, 0.0],
            ]),
        metric_keys.CALIBRATION_PLOT_BOUNDARIES:
            np.array([0.0, 0.5, 1.0]),
    }
    expected_plot_data = """
      slice_key {
        single_slice_keys {
          column: 'fruit'
          bytes_value: 'apple'
        }
      }
      plots {
        key: "post_export_metrics"
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              upper_threshold_exclusive: 0.0
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
            buckets {
              lower_threshold_inclusive: 0.0
              upper_threshold_exclusive: 0.5
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 1.0 }
              total_weighted_refined_prediction { value: 0.3 }
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.7 }
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
          }
        }
      }
    """
    calibration_plot = (
        post_export_metrics.calibration_plot_and_prediction_histogram())
    got = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
        (slice_key, tfma_plots), [calibration_plot])
    self.assertProtoEquals(expected_plot_data, got)
def testConvertSlicePlotsToProtoEmptyPlot(self):
slice_key = _make_slice_key('fruit', 'apple')
tfma_plots = {metric_keys.ERROR_METRIC: 'error_message'}
actual_plot = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
(slice_key, tfma_plots), [])
expected_plot = metrics_for_slice_pb2.PlotsForSlice()
expected_plot.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
expected_plot.plots[
metric_keys.ERROR_METRIC].debug_message = 'error_message'
self.assertProtoEquals(expected_plot, actual_plot)
  def testConvertSliceMetricsToProto(self):
    """A MetricKey-keyed scalar metric converts to metric_keys_and_values.

    Covers int64, bytes and float single_slice_keys plus a double_value metric
    carrying the MetricKey's output_name.
    """
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        metric_types.MetricKey(name='accuracy', output_name='output_name'): 0.8
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metric_keys_and_values {
          key {
            name: "accuracy"
            output_name: "output_name"
          }
          value {
            double_value {
              value: 0.8
            }
          }
        }""", metrics_for_slice_pb2.MetricsForSlice())
    # None => no legacy add_metrics_callbacks; uses the MetricKey-based path.
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), None)
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoConfusionMatrices(self):
    """A binary_confusion_matrices.Matrices value converts per threshold.

    Precision/recall are derived from the fn/tn/fp/tp counts for each of the
    three thresholds in the output confusion_matrix_at_thresholds message.
    """
    slice_key = _make_slice_key()
    slice_metrics = {
        metric_types.MetricKey(name='confusion_matrix_at_thresholds'):
            binary_confusion_matrices.Matrices(
                thresholds=[0.25, 0.75, 1.00],
                fn=[0.0, 1.0, 2.0],
                tn=[1.0, 1.0, 1.0],
                fp=[0.0, 0.0, 0.0],
                tp=[2.0, 1.0, 0.0])
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metric_keys_and_values {
          key: { name: "confusion_matrix_at_thresholds" }
          value {
            confusion_matrix_at_thresholds {
              matrices {
                threshold: 0.25
                false_negatives: 0.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 2.0
                precision: 1.0
                recall: 1.0
              }
              matrices {
                threshold: 0.75
                false_negatives: 1.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 1.0
                precision: 1.0
                recall: 0.5
              }
              matrices {
                threshold: 1.00
                false_negatives: 2.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 0.0
                precision: 1.0
                recall: 0.0
              }
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), add_metrics_callbacks=[])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoConfusionMatricesPostExport(self):
    """Legacy string-keyed confusion matrices convert via the callback path.

    Each row of `matrices` is [fn, tn, fp, tp, precision, recall] for the
    corresponding threshold; the third row's precision is NaN (tp + fp == 0).
    The output also carries bounded_* and t_distribution_* variants.
    """
    slice_key = _make_slice_key()
    thresholds = [0.25, 0.75, 1.00]
    matrices = [[0.0, 1.0, 0.0, 2.0, 1.0, 1.0], [1.0, 1.0, 0.0, 1.0, 1.0, 0.5],
                [2.0, 1.0, 0.0, 0.0, float('nan'), 0.0]]
    slice_metrics = {
        metric_keys.CONFUSION_MATRIX_AT_THRESHOLDS_MATRICES: matrices,
        metric_keys.CONFUSION_MATRIX_AT_THRESHOLDS_THRESHOLDS: thresholds,
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "post_export_metrics/confusion_matrix_at_thresholds"
          value {
            confusion_matrix_at_thresholds {
              matrices {
                threshold: 0.25
                false_negatives: 0.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 2.0
                precision: 1.0
                recall: 1.0
                bounded_false_negatives {
                  value {
                    value: 0.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 2.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: 1.0
                  }
                }
                bounded_recall {
                  value {
                    value: 1.0
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 2.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 1.0
                  }
                }
              }
              matrices {
                threshold: 0.75
                false_negatives: 1.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 1.0
                precision: 1.0
                recall: 0.5
                bounded_false_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 1.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: 1.0
                  }
                }
                bounded_recall {
                  value {
                    value: 0.5
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 0.5
                  }
                }
              }
              matrices {
                threshold: 1.00
                false_negatives: 2.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 0.0
                precision: nan
                recall: 0.0
                bounded_false_negatives {
                  value {
                    value: 2.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: nan
                  }
                }
                bounded_recall {
                  value {
                    value: 0.0
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 2.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: nan
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 0.0
                  }
                }
              }
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.confusion_matrix_at_thresholds(thresholds)])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoMetricsRanges(self):
    """T-distribution and bounded metrics convert to bounded_value entries.

    The accuracy metric carries a ValueWithTDistribution (POISSON_BOOTSTRAP
    bounds), while AUC/AUPRC carry explicit lower/upper bound keys
    (RIEMANN_SUM). The expected proto uses string.Template because the
    post-export metric key names contain no literal '$'-free form here.
    """
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        'accuracy': types.ValueWithTDistribution(0.8, 0.1, 9, 0.8),
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            bounded_value {
              value {
                value: 0.8
              }
              lower_bound { value: 0.5737843 }
              upper_bound { value: 1.0262157 }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
              methodology: RIEMANN_SUM
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
              methodology: RIEMANN_SUM
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoFromLegacyStrings(self):
    """Plain float metrics keyed by legacy strings convert as expected.

    A bare 'accuracy' float becomes a double_value; AUC/AUPRC with explicit
    lower/upper bound keys become bounded_value entries (RIEMANN_SUM).
    """
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        'accuracy': 0.8,
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            double_value {
              value: 0.8
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
              methodology: RIEMANN_SUM
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
              methodology: RIEMANN_SUM
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
def testConvertSliceMetricsToProtoEmptyMetrics(self):
slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
slice_metrics = {metric_keys.ERROR_METRIC: 'error_message'}
actual_metrics = (
metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
(slice_key, slice_metrics),
[post_export_metrics.auc(),
post_export_metrics.auc(curve='PR')]))
expected_metrics = metrics_for_slice_pb2.MetricsForSlice()
expected_metrics.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
expected_metrics.metrics[
metric_keys.ERROR_METRIC].debug_message = 'error_message'
self.assertProtoEquals(expected_metrics, actual_metrics)
def testConvertSliceMetricsToProtoStringMetrics(self):
slice_key = _make_slice_key()
slice_metrics = {
'valid_ascii': b'test string',
'valid_unicode': b'\xF0\x9F\x90\x84', # U+1F404, Cow
'invalid_unicode': b'\xE2\x28\xA1',
}
expected_metrics_for_slice = metrics_for_slice_pb2.MetricsForSlice()
expected_metrics_for_slice.slice_key.SetInParent()
expected_metrics_for_slice.metrics[
'valid_ascii'].bytes_value = slice_metrics['valid_ascii']
expected_metrics_for_slice.metrics[
'valid_unicode'].bytes_value = slice_metrics['valid_unicode']
expected_metrics_for_slice.metrics[
'invalid_unicode'].bytes_value = slice_metrics['invalid_unicode']
got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
(slice_key, slice_metrics), [])
self.assertProtoEquals(expected_metrics_for_slice, got)
  def testCombineValidationsValidationOk(self):
    """Combining two passing validations merges their slicing details.

    Slicing specs present in both inputs have num_matching_slices summed;
    specs seen in only one input are carried over, and validation_ok
    remains true.
    """
    input_validations = [
        text_format.Parse(
            """
            validation_ok: true
            metric_validations_per_slice {
              slice_key {
                single_slice_keys {
                  column: "x"
                  bytes_value: "x1"
                }
              }
            }
            validation_details {
              slicing_details {
                slicing_spec {}
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 1
              }
            }""", validation_result_pb2.ValidationResult()),
        text_format.Parse(
            """
            validation_ok: true
            validation_details {
              slicing_details {
                slicing_spec {
                  feature_keys: ["x"]
                }
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 2
              }
            }""", validation_result_pb2.ValidationResult())
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(name='candidate'),
            config_pb2.ModelSpec(name='baseline', is_baseline=True)
        ],
        slicing_specs=[config_pb2.SlicingSpec()],
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='AUC',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=[config_pb2.SlicingSpec()],
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        lower_bound={'value': 0.7})))
                        ]),
                ],
                model_names=['candidate', 'baseline']),
        ])
    # ["x", "y"] appears in both inputs (1 + 2), so its merged count is 3.
    expected_validation = text_format.Parse(
        """
        validation_ok: true
        metric_validations_per_slice {
          slice_key {
            single_slice_keys {
              column: "x"
              bytes_value: "x1"
            }
          }
        }
        validation_details {
          slicing_details {
            slicing_spec {}
            num_matching_slices: 1
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x", "y"]
            }
            num_matching_slices: 3
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x"]
            }
            num_matching_slices: 1
          }
        }""", validation_result_pb2.ValidationResult())
    def verify_fn(result):
      # CombineGlobally yields a single merged ValidationResult.
      self.assertLen(result, 1)
      self.assertProtoEquals(expected_validation, result[0])
    with beam.Pipeline() as pipeline:
      result = (
          pipeline
          | 'Create' >> beam.Create(input_validations)
          | 'CombineValidations' >> beam.CombineGlobally(
              metrics_plots_and_validations_writer.CombineValidations(
                  eval_config)))
      util.assert_that(result, verify_fn)
  def testCombineValidationsMissingSlices(self):
    """Slicing specs with no matching slices are reported as missing_slices.

    The eval_config requires four slicing specs but the combined inputs only
    cover three, so the ['z'] spec surfaces in missing_slices and the
    per-slice failure from the first input is preserved.
    """
    input_validations = [
        text_format.Parse(
            """
            validation_ok: false
            metric_validations_per_slice {
              slice_key {
                single_slice_keys {
                  column: "x"
                  bytes_value: "x1"
                }
              }
              failures {
                metric_key {
                  name: "auc"
                  model_name: "candidate"
                  is_diff: true
                }
                metric_threshold {
                  value_threshold {
                    lower_bound { value: 0.7 }
                  }
                }
                metric_value {
                  double_value { value: 0.6 }
                }
              }
            }
            validation_details {
              slicing_details {
                slicing_spec {}
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 1
              }
            }""", validation_result_pb2.ValidationResult()),
        text_format.Parse(
            """
            validation_ok: true
            validation_details {
              slicing_details {
                slicing_spec {
                  feature_keys: ["x"]
                }
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 2
              }
            }""", validation_result_pb2.ValidationResult())
    ]
    # Four required specs; ["z"] never matches and must be flagged missing.
    slicing_specs = [
        config_pb2.SlicingSpec(),
        config_pb2.SlicingSpec(feature_keys=['x']),
        config_pb2.SlicingSpec(feature_keys=['x', 'y']),
        config_pb2.SlicingSpec(feature_keys=['z']),
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(name='candidate'),
            config_pb2.ModelSpec(name='baseline', is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='AUC',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=slicing_specs,
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        lower_bound={'value': 0.7})))
                        ]),
                ],
                model_names=['candidate', 'baseline']),
        ])
    expected_validation = text_format.Parse(
        """
        validation_ok: false
        metric_validations_per_slice {
          slice_key {
            single_slice_keys {
              column: "x"
              bytes_value: "x1"
            }
          }
          failures {
            metric_key {
              name: "auc"
              model_name: "candidate"
              is_diff: true
            }
            metric_threshold {
              value_threshold {
                lower_bound { value: 0.7 }
              }
            }
            metric_value {
              double_value { value: 0.6 }
            }
          }
        }
        missing_slices {
          feature_keys: "z"
        }
        validation_details {
          slicing_details {
            slicing_spec {}
            num_matching_slices: 1
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x", "y"]
            }
            num_matching_slices: 3
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x"]
            }
            num_matching_slices: 1
          }
        }""", validation_result_pb2.ValidationResult())
    def verify_fn(result):
      # CombineGlobally yields a single merged ValidationResult.
      self.assertLen(result, 1)
      self.assertProtoEquals(expected_validation, result[0])
    with beam.Pipeline() as pipeline:
      result = (
          pipeline
          | 'Create' >> beam.Create(input_validations)
          | 'CombineValidations' >> beam.CombineGlobally(
              metrics_plots_and_validations_writer.CombineValidations(
                  eval_config)))
      util.assert_that(result, verify_fn)
  def testUncertaintyValuedMetrics(self):
    """ValueWithTDistribution metrics convert to POISSON_BOOTSTRAP bounds.

    A finite t-distribution value yields a bounded_value with confidence
    bounds; NaN inputs propagate NaN bounds instead of raising.
    """
    slice_key = _make_slice_key()
    slice_metrics = {
        'one_dim':
            types.ValueWithTDistribution(2.0, 1.0, 3, 2.0),
        'nans':
            types.ValueWithTDistribution(
                float('nan'), float('nan'), -1, float('nan')),
    }
    # Bounds below (-1.18..., 5.18...) are the t-distribution confidence
    # interval for mean 2.0, stderr 1.0, 3 degrees of freedom.
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "one_dim"
          value {
            bounded_value {
              value {
                value: 2.0
              }
              lower_bound {
                value: -1.1824463
              }
              upper_bound {
                value: 5.1824463
              }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        metrics {
          key: "nans"
          value {
            bounded_value {
              value {
                value: nan
              }
              lower_bound {
                value: nan
              }
              upper_bound {
                value: nan
              }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), [])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoTensorValuedMetrics(self):
    """ndarray metrics serialize to array_value with dtype, shape and data.

    Arrays are flattened row-major into the typed values field matching
    their numpy dtype (float32, bytes, int64).
    """
    slice_key = _make_slice_key()
    slice_metrics = {
        'one_dim':
            np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32),
        'two_dims':
            np.array([['two', 'dims', 'test'], ['TWO', 'DIMS', 'TEST']]),
        'three_dims':
            np.array([[[100, 200, 300]], [[500, 600, 700]]], dtype=np.int64),
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "one_dim"
          value {
            array_value {
              data_type: FLOAT32
              shape: 4
              float32_values: [1.0, 2.0, 3.0, 4.0]
            }
          }
        }
        metrics {
          key: "two_dims"
          value {
            array_value {
              data_type: BYTES
              shape: [2, 3]
              bytes_values: ["two", "dims", "test", "TWO", "DIMS", "TEST"]
            }
          }
        }
        metrics {
          key: "three_dims"
          value {
            array_value {
              data_type: INT64
              shape: [2, 1, 3]
              int64_values: [100, 200, 300, 500, 600, 700]
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), [])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceAttributionsToProto(self):
    """Per-feature attribution dicts convert to AttributionsForSlice protos."""
    slice_key = _make_slice_key('language', 'english', 'price', 0.3)
    # One AttributionsKey mapping to a per-feature score dict.
    slice_attributions = {
        metric_types.AttributionsKey(name='mean', output_name='output_name'): {
            'age': 0.8,
            'language': 1.2,
            'price': 2.3,
        },
    }
    expected_attributions_for_slice = text_format.Parse(
        """
        slice_key {
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        attributions_keys_and_values {
          key {
            name: "mean"
            output_name: "output_name"
          }
          values {
            key: "age"
            value: {
              double_value {
                value: 0.8
              }
            }
          }
          values {
            key: "language"
            value: {
              double_value {
                value: 1.2
              }
            }
          }
          values {
            key: "price"
            value: {
              double_value {
                value: 2.3
              }
            }
          }
        }""", metrics_for_slice_pb2.AttributionsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_attributions_to_proto(
        (slice_key, slice_attributions))
    self.assertProtoEquals(expected_attributions_for_slice, got)
_OUTPUT_FORMAT_PARAMS = [('without_output_file_format', ''),
('tfrecord_file_format', 'tfrecord'),
('parquet_file_format', 'parquet')]
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteValidationResults(self, output_file_format):
    """End-to-end: writer persists validation failures and missing slices.

    Runs a full extract/evaluate/write pipeline over two examples with
    thresholds that are deliberately violated, then checks the deserialized
    ValidationResult records each expected failure, the missing slice and
    cross-slice specs, and the slicing details.
    """
    model_dir, baseline_dir = self._getExportDir(), self._getBaselineDir()
    eval_shared_model = self._build_keras_model(model_dir, mul=0)
    baseline_eval_shared_model = self._build_keras_model(baseline_dir, mul=1)
    validations_file = os.path.join(self._getTempDir(),
                                    constants.VALIDATIONS_KEY)
    schema = text_format.Parse(
        """
        tensor_representation_group {
          key: ""
          value {
            tensor_representation {
              key: "input_1"
              value {
                dense_tensor {
                  column_name: "input_1"
                  shape { dim { size: 1 } }
                }
              }
            }
          }
        }
        feature {
          name: "input_1"
          type: FLOAT
        }
        feature {
          name: "label"
          type: FLOAT
        }
        feature {
          name: "example_weight"
          type: FLOAT
        }
        feature {
          name: "extra_feature"
          type: BYTES
        }
        """, schema_pb2.Schema())
    tfx_io = test_util.InMemoryTFExampleRecord(
        schema=schema, raw_record_column_name=constants.ARROW_INPUT_COLUMN)
    tensor_adapter_config = tensor_adapter.TensorAdapterConfig(
        arrow_schema=tfx_io.ArrowSchema(),
        tensor_representations=tfx_io.TensorRepresentations())
    examples = [
        self._makeExample(
            input_1=0.0,
            label=1.0,
            example_weight=1.0,
            extra_feature='non_model_feature'),
        self._makeExample(
            input_1=1.0,
            label=0.0,
            example_weight=0.5,
            extra_feature='non_model_feature'),
    ]
    # 'slice_does_not_exist' is intentionally unmatched so it shows up as a
    # missing slice / missing cross slice in the result.
    slicing_specs = [
        config_pb2.SlicingSpec(),
        config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
    ]
    cross_slicing_specs = [
        config_pb2.CrossSlicingSpec(
            baseline_spec=config_pb2.SlicingSpec(
                feature_keys=['slice_does_not_exist']),
            slicing_specs=[
                config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
            ])
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(
                name='candidate',
                label_key='label',
                example_weight_key='example_weight'),
            config_pb2.ModelSpec(
                name='baseline',
                label_key='label',
                example_weight_key='example_weight',
                is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        cross_slicing_specs=cross_slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='WeightedExampleCount',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=slicing_specs,
                                # 1.5 < 1, NOT OK.
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        upper_bound={'value': 1})))
                        ],
                        # missing cross slice
                        cross_slice_thresholds=[
                            config_pb2.CrossSliceMetricThreshold(
                                cross_slicing_specs=cross_slicing_specs,
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        upper_bound={'value': 1})))
                        ]),
                    config_pb2.MetricConfig(
                        class_name='ExampleCount',
                        # 2 > 10, NOT OK.
                        threshold=config_pb2.MetricThreshold(
                            value_threshold=config_pb2.GenericValueThreshold(
                                lower_bound={'value': 10}))),
                    config_pb2.MetricConfig(
                        class_name='MeanLabel',
                        # 0.5 > 1 and 0.5 > 1?: NOT OK.
                        threshold=config_pb2.MetricThreshold(
                            change_threshold=config_pb2.GenericChangeThreshold(
                                direction=config_pb2.MetricDirection
                                .HIGHER_IS_BETTER,
                                relative={'value': 1},
                                absolute={'value': 1}))),
                    config_pb2.MetricConfig(
                        # MeanPrediction = (0+0)/(1+0.5) = 0
                        class_name='MeanPrediction',
                        # -.01 < 0 < .01, OK.
                        # Diff% = -.333/.333 = -100% < -99%, OK.
                        # Diff = 0 - .333 = -.333 < 0, OK.
                        threshold=config_pb2.MetricThreshold(
                            value_threshold=config_pb2.GenericValueThreshold(
                                upper_bound={'value': .01},
                                lower_bound={'value': -.01}),
                            change_threshold=config_pb2.GenericChangeThreshold(
                                direction=config_pb2.MetricDirection
                                .LOWER_IS_BETTER,
                                relative={'value': -.99},
                                absolute={'value': 0})))
                ],
                model_names=['candidate', 'baseline']),
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}),
    )
    slice_spec = [
        slicer.SingleSliceSpec(spec=s) for s in eval_config.slicing_specs
    ]
    eval_shared_models = {
        'candidate': eval_shared_model,
        'baseline': baseline_eval_shared_model
    }
    extractors = [
        features_extractor.FeaturesExtractor(eval_config),
        labels_extractor.LabelsExtractor(eval_config),
        example_weights_extractor.ExampleWeightsExtractor(eval_config),
        predictions_extractor.PredictionsExtractor(
            eval_shared_model=eval_shared_models,
            eval_config=eval_config,
            tensor_adapter_config=tensor_adapter_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor(slice_spec=slice_spec)
    ]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(
            eval_config=eval_config, eval_shared_model=eval_shared_models)
    ]
    output_paths = {
        constants.VALIDATIONS_KEY: validations_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format)
    ]
    with beam.Pipeline() as pipeline:
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter
    # Read back what the writer wrote and verify it matches expectations.
    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))
    expected_validations = [
        text_format.Parse(
            """
            metric_key {
              name: "weighted_example_count"
              model_name: "candidate"
            }
            metric_threshold {
              value_threshold {
                upper_bound {
                  value: 1.0
                }
              }
            }
            metric_value {
              double_value {
                value: 1.5
              }
            }
            """, validation_result_pb2.ValidationFailure()),
        text_format.Parse(
            """
            metric_key {
              name: "example_count"
              model_name: "candidate"
            }
            metric_threshold {
              value_threshold {
                lower_bound {
                  value: 10.0
                }
              }
            }
            metric_value {
              double_value {
                value: 2.0
              }
            }
            """, validation_result_pb2.ValidationFailure()),
        text_format.Parse(
            """
            metric_key {
              name: "mean_label"
              model_name: "candidate"
              is_diff: true
            }
            metric_threshold {
              change_threshold {
                absolute {
                  value: 1.0
                }
                relative {
                  value: 1.0
                }
                direction: HIGHER_IS_BETTER
              }
            }
            metric_value {
              double_value {
                value: 0.0
              }
            }
            """, validation_result_pb2.ValidationFailure()),
    ]
    self.assertFalse(validation_result.validation_ok)
    self.assertFalse(validation_result.missing_thresholds)
    self.assertLen(validation_result.metric_validations_per_slice, 1)
    self.assertCountEqual(
        expected_validations,
        validation_result.metric_validations_per_slice[0].failures)
    expected_missing_slices = [
        config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
    ]
    self.assertLen(validation_result.missing_slices, 1)
    self.assertCountEqual(expected_missing_slices,
                          validation_result.missing_slices)
    expected_missing_cross_slices = [
        config_pb2.CrossSlicingSpec(
            baseline_spec=config_pb2.SlicingSpec(
                feature_keys=['slice_does_not_exist']),
            slicing_specs=[
                config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
            ])
    ]
    self.assertLen(validation_result.missing_cross_slices, 1)
    self.assertCountEqual(expected_missing_cross_slices,
                          validation_result.missing_cross_slices)
    expected_slicing_details = [
        text_format.Parse(
            """
            slicing_spec {
            }
            num_matching_slices: 1
            """, validation_result_pb2.SlicingDetails()),
    ]
    self.assertLen(validation_result.validation_details.slicing_details, 1)
    self.assertCountEqual(expected_slicing_details,
                          validation_result.validation_details.slicing_details)
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteValidationResultsNoThresholds(self, output_file_format):
    """Without any thresholds, validation fails unless rubber-stamped.

    The first write (no thresholds, no rubber stamp) yields
    validation_ok=False with missing_thresholds set; re-running the writer
    with rubber_stamp=True flips validation_ok to True.
    """
    model_dir, baseline_dir = self._getExportDir(), self._getBaselineDir()
    eval_shared_model = self._build_keras_model(model_dir, mul=0)
    baseline_eval_shared_model = self._build_keras_model(baseline_dir, mul=1)
    validations_file = os.path.join(self._getTempDir(),
                                    constants.VALIDATIONS_KEY)
    schema = text_format.Parse(
        """
        tensor_representation_group {
          key: ""
          value {
            tensor_representation {
              key: "input_1"
              value {
                dense_tensor {
                  column_name: "input_1"
                  shape { dim { size: 1 } }
                }
              }
            }
          }
        }
        feature {
          name: "input_1"
          type: FLOAT
        }
        feature {
          name: "label"
          type: FLOAT
        }
        feature {
          name: "example_weight"
          type: FLOAT
        }
        feature {
          name: "extra_feature"
          type: BYTES
        }
        """, schema_pb2.Schema())
    tfx_io = test_util.InMemoryTFExampleRecord(
        schema=schema, raw_record_column_name=constants.ARROW_INPUT_COLUMN)
    tensor_adapter_config = tensor_adapter.TensorAdapterConfig(
        arrow_schema=tfx_io.ArrowSchema(),
        tensor_representations=tfx_io.TensorRepresentations())
    examples = [
        self._makeExample(
            input_1=0.0,
            label=1.0,
            example_weight=1.0,
            extra_feature='non_model_feature'),
        self._makeExample(
            input_1=1.0,
            label=0.0,
            example_weight=0.5,
            extra_feature='non_model_feature'),
    ]
    slicing_specs = [
        config_pb2.SlicingSpec(),
    ]
    # Note: none of the metrics below declare thresholds.
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(
                name='candidate',
                label_key='label',
                example_weight_key='example_weight'),
            config_pb2.ModelSpec(
                name='baseline',
                label_key='label',
                example_weight_key='example_weight',
                is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(class_name='WeightedExampleCount'),
                    config_pb2.MetricConfig(class_name='ExampleCount'),
                    config_pb2.MetricConfig(class_name='MeanLabel')
                ],
                model_names=['candidate', 'baseline']),
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}),
    )
    slice_spec = [
        slicer.SingleSliceSpec(spec=s) for s in eval_config.slicing_specs
    ]
    eval_shared_models = {
        'candidate': eval_shared_model,
        'baseline': baseline_eval_shared_model
    }
    extractors = [
        features_extractor.FeaturesExtractor(eval_config),
        labels_extractor.LabelsExtractor(eval_config),
        example_weights_extractor.ExampleWeightsExtractor(eval_config),
        predictions_extractor.PredictionsExtractor(
            eval_shared_model=eval_shared_models,
            eval_config=eval_config,
            tensor_adapter_config=tensor_adapter_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor(slice_spec=slice_spec)
    ]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(
            eval_config=eval_config, eval_shared_model=eval_shared_models)
    ]
    output_paths = {
        constants.VALIDATIONS_KEY: validations_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format)
    ]
    with beam.Pipeline() as pipeline:
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter
    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))
    self.assertFalse(validation_result.validation_ok)
    self.assertTrue(validation_result.missing_thresholds)
    self.assertEmpty(validation_result.metric_validations_per_slice)
    # Add rubber stamp would make validation ok.
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format,
            rubber_stamp=True)
    ]
    with beam.Pipeline() as pipeline:
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter
    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))
    self.assertTrue(validation_result.validation_ok)
    self.assertFalse(validation_result.missing_thresholds)
    self.assertEmpty(validation_result.metric_validations_per_slice)
    self.assertTrue(validation_result.rubber_stamp)
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteMetricsAndPlots(self, output_file_format):
    """Metrics and plots written by the writer load back and deserialize.

    Uses the legacy estimator-based evaluator with a calibration plot
    callback; verifies both the metrics file and the plots file contain one
    record matching the expected protos.
    """
    metrics_file = os.path.join(self._getTempDir(), 'metrics')
    plots_file = os.path.join(self._getTempDir(), 'plots')
    temp_eval_export_dir = os.path.join(self._getTempDir(), 'eval_export_dir')
    _, eval_export_dir = (
        fixed_prediction_estimator.simple_fixed_prediction_estimator(
            None, temp_eval_export_dir))
    eval_config = config_pb2.EvalConfig(
        model_specs=[config_pb2.ModelSpec()],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}))
    eval_shared_model = self.createTestEvalSharedModel(
        eval_saved_model_path=eval_export_dir,
        add_metrics_callbacks=[
            post_export_metrics.example_count(),
            post_export_metrics.calibration_plot_and_prediction_histogram(
                num_buckets=2)
        ])
    extractors = [
        legacy_predict_extractor.PredictExtractor(
            eval_shared_model, eval_config=eval_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor()
    ]
    evaluators = [
        legacy_metrics_and_plots_evaluator.MetricsAndPlotsEvaluator(
            eval_shared_model)
    ]
    output_paths = {
        constants.METRICS_KEY: metrics_file,
        constants.PLOTS_KEY: plots_file
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=eval_shared_model.add_metrics_callbacks,
            output_file_format=output_file_format)
    ]
    tfx_io = raw_tf_record.RawBeamRecordTFXIO(
        physical_format='inmemory',
        raw_record_column_name=constants.ARROW_INPUT_COLUMN,
        telemetry_descriptors=['TFMATest'])
    with beam.Pipeline() as pipeline:
      example1 = self._makeExample(prediction=0.0, label=1.0)
      example2 = self._makeExample(prediction=1.0, label=1.0)
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([
              example1.SerializeToString(),
              example2.SerializeToString(),
          ])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'ExtractEvaluateAndWriteResults' >>
          model_eval_lib.ExtractEvaluateAndWriteResults(
              eval_config=eval_config,
              eval_shared_model=eval_shared_model,
              extractors=extractors,
              evaluators=evaluators,
              writers=writers))
      # pylint: enable=no-value-for-parameter
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "average_loss"
          value {
            double_value {
              value: 0.5
            }
          }
        }
        metrics {
          key: "post_export_metrics/example_count"
          value {
            double_value {
              value: 2.0
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    metric_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_metrics(
            metrics_file, output_file_format))
    self.assertLen(metric_records, 1, 'metrics: %s' % metric_records)
    self.assertProtoEquals(expected_metrics_for_slice, metric_records[0])
    # Calibration histogram with 2 buckets plus the two unbounded edge
    # buckets (-inf and [1.0, inf)).
    expected_plots_for_slice = text_format.Parse(
        """
      slice_key {}
      plots {
        key: "post_export_metrics"
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              num_weighted_examples {}
              total_weighted_label {}
              total_weighted_refined_prediction {}
            }
            buckets {
              upper_threshold_exclusive: 0.5
              num_weighted_examples {
                value: 1.0
              }
              total_weighted_label {
                value: 1.0
              }
              total_weighted_refined_prediction {}
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples {
              }
              total_weighted_label {}
              total_weighted_refined_prediction {}
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples {
                value: 1.0
              }
              total_weighted_label {
                value: 1.0
              }
              total_weighted_refined_prediction {
                value: 1.0
              }
            }
          }
        }
      }
    """, metrics_for_slice_pb2.PlotsForSlice())
    plot_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_plots(
            plots_file, output_file_format))
    self.assertLen(plot_records, 1, 'plots: %s' % plot_records)
    self.assertProtoEquals(expected_plots_for_slice, plot_records[0])
  @parameterized.named_parameters(('parquet_file_format', 'parquet'))
  def testLoadAndDeserializeFilteredMetricsAndPlots(self, output_file_format):
    """Loading with a slice-key filter returns only the matching slices.

    Only the parquet format supports filtered reads here; the filter selects
    the slice with prediction == 0 and the loaders must return exactly that
    one metrics record and one plots record.
    """
    metrics_file = os.path.join(self._getTempDir(), 'metrics')
    plots_file = os.path.join(self._getTempDir(), 'plots')
    temp_eval_export_dir = os.path.join(self._getTempDir(), 'eval_export_dir')
    _, eval_export_dir = (
        fixed_prediction_estimator.simple_fixed_prediction_estimator(
            None, temp_eval_export_dir))
    eval_config = config_pb2.EvalConfig(
        model_specs=[config_pb2.ModelSpec()],
        slicing_specs=[
            config_pb2.SlicingSpec(),
            config_pb2.SlicingSpec(feature_keys=['prediction'])
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}))
    eval_shared_model = self.createTestEvalSharedModel(
        eval_saved_model_path=eval_export_dir,
        add_metrics_callbacks=[
            post_export_metrics.example_count(),
            post_export_metrics.calibration_plot_and_prediction_histogram(
                num_buckets=2)
        ])
    extractors = [
        legacy_predict_extractor.PredictExtractor(
            eval_shared_model, eval_config=eval_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor(
            eval_config=eval_config, materialize=False)
    ]
    evaluators = [
        legacy_metrics_and_plots_evaluator.MetricsAndPlotsEvaluator(
            eval_shared_model)
    ]
    output_paths = {
        constants.METRICS_KEY: metrics_file,
        constants.PLOTS_KEY: plots_file
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=eval_shared_model.add_metrics_callbacks,
            output_file_format=output_file_format)
    ]
    tfx_io = raw_tf_record.RawBeamRecordTFXIO(
        physical_format='inmemory',
        raw_record_column_name=constants.ARROW_INPUT_COLUMN,
        telemetry_descriptors=['TFMATest'])
    with beam.Pipeline() as pipeline:
      example1 = self._makeExample(prediction=0.0, label=1.0, country='US')
      example2 = self._makeExample(prediction=1.0, label=1.0, country='CA')
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([
              example1.SerializeToString(),
              example2.SerializeToString(),
          ])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'ExtractEvaluateAndWriteResults' >>
          model_eval_lib.ExtractEvaluateAndWriteResults(
              eval_config=eval_config,
              eval_shared_model=eval_shared_model,
              extractors=extractors,
              evaluators=evaluators,
              writers=writers))
      # pylint: enable=no-value-for-parameter
    # only read the metrics with slice keys that match the following spec
    slice_keys_filter = [slicer.SingleSliceSpec(features=[('prediction', 0)])]
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {
          single_slice_keys {
            column: "prediction"
            float_value: 0
          }
        }
        metrics {
          key: "average_loss"
          value {
            double_value {
              value: 1.0
            }
          }
        }
        metrics {
          key: "post_export_metrics/example_count"
          value {
            double_value {
              value: 1.0
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())
    metric_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_metrics(
            metrics_file, output_file_format, slice_keys_filter))
    self.assertLen(metric_records, 1, 'metrics: %s' % metric_records)
    self.assertProtoEquals(expected_metrics_for_slice, metric_records[0])
    expected_plots_for_slice = text_format.Parse(
        """
      slice_key {
        single_slice_keys {
            column: "prediction"
            float_value: 0
          }
      }
      plots {
        key: "post_export_metrics"
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              num_weighted_examples {}
              total_weighted_label {}
              total_weighted_refined_prediction {}
            }
            buckets {
              upper_threshold_exclusive: 0.5
              num_weighted_examples {
                value: 1.0
              }
              total_weighted_label {
                value: 1.0
              }
              total_weighted_refined_prediction {}
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples {
              }
              total_weighted_label {}
              total_weighted_refined_prediction {}
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples {
                value: 0.0
              }
              total_weighted_label {
                value: 0.0
              }
              total_weighted_refined_prediction {
                value: 0.0
              }
            }
          }
        }
      }
    """, metrics_for_slice_pb2.PlotsForSlice())
    plot_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_plots(
            plots_file, output_file_format, slice_keys_filter))
    self.assertLen(plot_records, 1, 'plots: %s' % plot_records)
    self.assertProtoEquals(expected_plots_for_slice, plot_records[0])
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteAttributions(self, output_file_format):
    """TotalAttributions outputs are written and can be loaded back.

    Three examples' per-feature attributions (scalars and 1-element arrays)
    are summed across the overall slice and round-tripped through the
    writer's attributions output.
    """
    attributions_file = os.path.join(self._getTempDir(), 'attributions')
    # NOTE(review): other tests in this file disable 'eval_config_pb2.json';
    # confirm which filename the writer actually emits.
    eval_config = config_pb2.EvalConfig(
        model_specs=[config_pb2.ModelSpec()],
        metrics_specs=[
            config_pb2.MetricsSpec(metrics=[
                config_pb2.MetricConfig(class_name=attributions
                                        .TotalAttributions().__class__.__name__)
            ])
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config.json']}))
    extractors = [slice_key_extractor.SliceKeyExtractor()]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(eval_config=eval_config)
    ]
    output_paths = {
        constants.ATTRIBUTIONS_KEY: attributions_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            output_file_format=output_file_format)
    ]
    example1 = {
        'features': {},
        'attributions': {
            'feature1': 1.1,
            'feature2': 1.2
        }
    }
    example2 = {
        'features': {},
        'attributions': {
            'feature1': 2.1,
            'feature2': 2.2
        }
    }
    # Array-valued attributions should contribute the same as scalars.
    example3 = {
        'features': {},
        'attributions': {
            'feature1': np.array([3.1]),
            'feature2': np.array([3.2])
        }
    }
    with beam.Pipeline() as pipeline:
      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([example1, example2, example3])
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter
    # Expected sums: feature1 = 1.1 + 2.1 + 3.1, feature2 = 1.2 + 2.2 + 3.2.
    expected_attributions_for_slice = text_format.Parse(
        """
        slice_key {}
        attributions_keys_and_values {
          key {
            name: "total_attributions"
          }
          values {
            key: "feature1"
            value: {
              double_value {
                value: 6.3
              }
            }
          }
          values {
            key: "feature2"
            value: {
              double_value {
                value: 6.6
              }
            }
          }
        }""", metrics_for_slice_pb2.AttributionsForSlice())
    attribution_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_attributions(
            attributions_file, output_file_format))
    self.assertLen(attribution_records, 1)
    self.assertProtoEquals(expected_attributions_for_slice,
                           attribution_records[0])
if __name__ == '__main__':
  # Tests exercise Keras models and eager-mode pipelines, so TF2 behavior
  # must be enabled before the test runner starts.
  tf.compat.v1.enable_v2_behavior()
  tf.test.main()
| 33.695256 | 98 | 0.543998 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import string
import tempfile
from absl.testing import parameterized
import apache_beam as beam
from apache_beam.testing import util
import numpy as np
import tensorflow as tf
from tensorflow_model_analysis import constants
from tensorflow_model_analysis import types
from tensorflow_model_analysis.api import model_eval_lib
from tensorflow_model_analysis.eval_saved_model import testutil
from tensorflow_model_analysis.eval_saved_model.example_trainers import fixed_prediction_estimator
from tensorflow_model_analysis.evaluators import legacy_metrics_and_plots_evaluator
from tensorflow_model_analysis.evaluators import metrics_plots_and_validations_evaluator
from tensorflow_model_analysis.extractors import example_weights_extractor
from tensorflow_model_analysis.extractors import features_extractor
from tensorflow_model_analysis.extractors import labels_extractor
from tensorflow_model_analysis.extractors import legacy_predict_extractor
from tensorflow_model_analysis.extractors import predictions_extractor
from tensorflow_model_analysis.extractors import slice_key_extractor
from tensorflow_model_analysis.extractors import unbatch_extractor
from tensorflow_model_analysis.metrics import attributions
from tensorflow_model_analysis.metrics import binary_confusion_matrices
from tensorflow_model_analysis.metrics import metric_types
from tensorflow_model_analysis.post_export_metrics import metric_keys
from tensorflow_model_analysis.post_export_metrics import post_export_metrics
from tensorflow_model_analysis.proto import config_pb2
from tensorflow_model_analysis.proto import metrics_for_slice_pb2
from tensorflow_model_analysis.proto import validation_result_pb2
from tensorflow_model_analysis.slicer import slicer_lib as slicer
from tensorflow_model_analysis.writers import metrics_plots_and_validations_writer
from tfx_bsl.tfxio import raw_tf_record
from tfx_bsl.tfxio import tensor_adapter
from tfx_bsl.tfxio import test_util
from google.protobuf import text_format
from tensorflow_metadata.proto.v0 import schema_pb2
def _make_slice_key(*args):
if len(args) % 2 != 0:
raise ValueError('number of arguments should be even')
result = []
for i in range(0, len(args), 2):
result.append((args[i], args[i + 1]))
result = tuple(result)
return result
class MetricsPlotsAndValidationsWriterTest(testutil.TensorflowModelAnalysisTest,
parameterized.TestCase):
  def setUp(self):
    """Enables full assertion messages in addition to the default ones."""
    super(MetricsPlotsAndValidationsWriterTest, self).setUp()
    # Append custom failure messages to the standard unittest messages.
    self.longMessage = True
def _getTempDir(self):
return tempfile.mkdtemp()
def _getExportDir(self):
return os.path.join(self._getTempDir(), 'export_dir')
def _getBaselineDir(self):
return os.path.join(self._getTempDir(), 'baseline_export_dir')
def _build_keras_model(self, model_dir, mul):
input_layer = tf.keras.layers.Input(shape=(1,), name='input_1')
output_layer = tf.keras.layers.Lambda(
lambda x, mul: x * mul, output_shape=(1,), arguments={'mul': mul})(
input_layer)
model = tf.keras.models.Model([input_layer], output_layer)
model.compile(
optimizer=tf.keras.optimizers.Adam(lr=.001),
loss=tf.keras.losses.BinaryCrossentropy(),
metrics=['accuracy'])
model.fit(x=[[0], [1]], y=[[0], [1]], steps_per_epoch=1)
model.save(model_dir, save_format='tf')
return self.createTestEvalSharedModel(
eval_saved_model_path=model_dir, tags=[tf.saved_model.SERVING])
  def testConvertSlicePlotsToProto(self):
    """Converts a {PlotKey: plot proto} dict to a PlotsForSlice proto."""
    slice_key = _make_slice_key('fruit', 'apple')
    plot_key = metric_types.PlotKey(
        name='calibration_plot', output_name='output_name')
    calibration_plot = text_format.Parse(
        """
      buckets {
        lower_threshold_inclusive: -inf
        upper_threshold_exclusive: 0.0
        num_weighted_examples { value: 0.0 }
        total_weighted_label { value: 0.0 }
        total_weighted_refined_prediction { value: 0.0 }
      }
      buckets {
        lower_threshold_inclusive: 0.0
        upper_threshold_exclusive: 0.5
        num_weighted_examples { value: 1.0 }
        total_weighted_label { value: 1.0 }
        total_weighted_refined_prediction { value: 0.3 }
      }
      buckets {
        lower_threshold_inclusive: 0.5
        upper_threshold_exclusive: 1.0
        num_weighted_examples { value: 1.0 }
        total_weighted_label { value: 0.0 }
        total_weighted_refined_prediction { value: 0.7 }
      }
      buckets {
        lower_threshold_inclusive: 1.0
        upper_threshold_exclusive: inf
        num_weighted_examples { value: 0.0 }
        total_weighted_label { value: 0.0 }
        total_weighted_refined_prediction { value: 0.0 }
      }
   """, metrics_for_slice_pb2.CalibrationHistogramBuckets())

    expected_plots_for_slice = text_format.Parse(
        """
      slice_key {
        single_slice_keys {
          column: 'fruit'
          bytes_value: 'apple'
        }
      }
      plot_keys_and_values {
        key {
          output_name: "output_name"
        }
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              upper_threshold_exclusive: 0.0
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
            buckets {
              lower_threshold_inclusive: 0.0
              upper_threshold_exclusive: 0.5
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 1.0 }
              total_weighted_refined_prediction { value: 0.3 }
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.7 }
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
          }
        }
      }
    """, metrics_for_slice_pb2.PlotsForSlice())

    # No add_metrics_callbacks needed for PlotKey-keyed (non-legacy) plots.
    got = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
        (slice_key, {
            plot_key: calibration_plot
        }), None)
    self.assertProtoEquals(expected_plots_for_slice, got)
  def testConvertSlicePlotsToProtoLegacyStringKeys(self):
    """Converts legacy string-keyed plot matrices via post-export callbacks."""
    slice_key = _make_slice_key('fruit', 'apple')
    tfma_plots = {
        metric_keys.CALIBRATION_PLOT_MATRICES:
            np.array([
                [0.0, 0.0, 0.0],
                [0.3, 1.0, 1.0],
                [0.7, 0.0, 1.0],
                [0.0, 0.0, 0.0],
            ]),
        metric_keys.CALIBRATION_PLOT_BOUNDARIES:
            np.array([0.0, 0.5, 1.0]),
    }
    expected_plot_data = """
      slice_key {
        single_slice_keys {
          column: 'fruit'
          bytes_value: 'apple'
        }
      }
      plots {
        key: "post_export_metrics"
        value {
          calibration_histogram_buckets {
            buckets {
              lower_threshold_inclusive: -inf
              upper_threshold_exclusive: 0.0
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
            buckets {
              lower_threshold_inclusive: 0.0
              upper_threshold_exclusive: 0.5
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 1.0 }
              total_weighted_refined_prediction { value: 0.3 }
            }
            buckets {
              lower_threshold_inclusive: 0.5
              upper_threshold_exclusive: 1.0
              num_weighted_examples { value: 1.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.7 }
            }
            buckets {
              lower_threshold_inclusive: 1.0
              upper_threshold_exclusive: inf
              num_weighted_examples { value: 0.0 }
              total_weighted_label { value: 0.0 }
              total_weighted_refined_prediction { value: 0.0 }
            }
          }
        }
      }
    """
    # The callback is what maps the raw matrices/boundaries arrays into the
    # calibration_histogram_buckets proto representation.
    calibration_plot = (
        post_export_metrics.calibration_plot_and_prediction_histogram())
    got = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
        (slice_key, tfma_plots), [calibration_plot])
    self.assertProtoEquals(expected_plot_data, got)
  def testConvertSlicePlotsToProtoEmptyPlot(self):
    """An error-only plots dict yields a PlotsForSlice with a debug message."""
    slice_key = _make_slice_key('fruit', 'apple')
    tfma_plots = {metric_keys.ERROR_METRIC: 'error_message'}
    actual_plot = metrics_plots_and_validations_writer.convert_slice_plots_to_proto(
        (slice_key, tfma_plots), [])
    expected_plot = metrics_for_slice_pb2.PlotsForSlice()
    expected_plot.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
    expected_plot.plots[
        metric_keys.ERROR_METRIC].debug_message = 'error_message'
    self.assertProtoEquals(expected_plot, actual_plot)
  def testConvertSliceMetricsToProto(self):
    """Converts a MetricKey-keyed scalar metric to MetricsForSlice."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        metric_types.MetricKey(name='accuracy', output_name='output_name'): 0.8
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metric_keys_and_values {
          key {
            name: "accuracy"
            output_name: "output_name"
          }
          value {
            double_value {
              value: 0.8
            }
          }
        }""", metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), None)
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoConfusionMatrices(self):
    """Converts binary confusion Matrices into confusion_matrix_at_thresholds.

    Precision/recall in the expected proto are derived from tp/fp/fn counts at
    each threshold.
    """
    slice_key = _make_slice_key()
    slice_metrics = {
        metric_types.MetricKey(name='confusion_matrix_at_thresholds'):
            binary_confusion_matrices.Matrices(
                thresholds=[0.25, 0.75, 1.00],
                fn=[0.0, 1.0, 2.0],
                tn=[1.0, 1.0, 1.0],
                fp=[0.0, 0.0, 0.0],
                tp=[2.0, 1.0, 0.0])
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metric_keys_and_values {
          key: { name: "confusion_matrix_at_thresholds" }
          value {
            confusion_matrix_at_thresholds {
              matrices {
                threshold: 0.25
                false_negatives: 0.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 2.0
                precision: 1.0
                recall: 1.0
              }
              matrices {
                threshold: 0.75
                false_negatives: 1.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 1.0
                precision: 1.0
                recall: 0.5
              }
              matrices {
                threshold: 1.00
                false_negatives: 2.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 0.0
                precision: 1.0
                recall: 0.0
              }
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), add_metrics_callbacks=[])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoConfusionMatricesPostExport(self):
    """Converts legacy string-keyed confusion matrices via a post-export callback.

    Each matrices row is [fn, tn, fp, tp, precision, recall]; the callback
    populates the legacy bounded_* and t_distribution_* fields as well. NaN
    precision (zero predicted positives at threshold 1.0) must round-trip.
    """
    slice_key = _make_slice_key()
    thresholds = [0.25, 0.75, 1.00]
    matrices = [[0.0, 1.0, 0.0, 2.0, 1.0, 1.0], [1.0, 1.0, 0.0, 1.0, 1.0, 0.5],
                [2.0, 1.0, 0.0, 0.0, float('nan'), 0.0]]
    slice_metrics = {
        metric_keys.CONFUSION_MATRIX_AT_THRESHOLDS_MATRICES: matrices,
        metric_keys.CONFUSION_MATRIX_AT_THRESHOLDS_THRESHOLDS: thresholds,
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "post_export_metrics/confusion_matrix_at_thresholds"
          value {
            confusion_matrix_at_thresholds {
              matrices {
                threshold: 0.25
                false_negatives: 0.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 2.0
                precision: 1.0
                recall: 1.0
                bounded_false_negatives {
                  value {
                    value: 0.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 2.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: 1.0
                  }
                }
                bounded_recall {
                  value {
                    value: 1.0
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 2.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 1.0
                  }
                }
              }
              matrices {
                threshold: 0.75
                false_negatives: 1.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 1.0
                precision: 1.0
                recall: 0.5
                bounded_false_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 1.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: 1.0
                  }
                }
                bounded_recall {
                  value {
                    value: 0.5
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 0.5
                  }
                }
              }
              matrices {
                threshold: 1.00
                false_negatives: 2.0
                true_negatives: 1.0
                false_positives: 0.0
                true_positives: 0.0
                precision: nan
                recall: 0.0
                bounded_false_negatives {
                  value {
                    value: 2.0
                  }
                }
                bounded_true_negatives {
                  value {
                    value: 1.0
                  }
                }
                bounded_true_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_false_positives {
                  value {
                    value: 0.0
                  }
                }
                bounded_precision {
                  value {
                    value: nan
                  }
                }
                bounded_recall {
                  value {
                    value: 0.0
                  }
                }
                t_distribution_false_negatives {
                  unsampled_value {
                    value: 2.0
                  }
                }
                t_distribution_true_negatives {
                  unsampled_value {
                    value: 1.0
                  }
                }
                t_distribution_true_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_false_positives {
                  unsampled_value {
                    value: 0.0
                  }
                }
                t_distribution_precision {
                  unsampled_value {
                    value: nan
                  }
                }
                t_distribution_recall {
                  unsampled_value {
                    value: 0.0
                  }
                }
              }
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.confusion_matrix_at_thresholds(thresholds)])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoMetricsRanges(self):
    """Converts t-distribution and lower/upper-bound metrics to bounded values.

    A ValueWithTDistribution becomes a POISSON_BOOTSTRAP bounded_value, while
    legacy AUC/AUPRC lower/upper-bound keys become RIEMANN_SUM bounded values.
    """
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        'accuracy': types.ValueWithTDistribution(0.8, 0.1, 9, 0.8),
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            bounded_value {
              value {
                value: 0.8
              }
              lower_bound { value: 0.5737843 }
              upper_bound { value: 1.0262157 }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
              methodology: RIEMANN_SUM
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
              methodology: RIEMANN_SUM
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoFromLegacyStrings(self):
    """Converts legacy string-keyed scalars alongside bounded AUC/AUPRC keys."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {
        'accuracy': 0.8,
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            double_value {
              value: 0.8
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
              methodology: RIEMANN_SUM
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
              methodology: RIEMANN_SUM
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoEmptyMetrics(self):
    """An error-only metrics dict yields a MetricsForSlice with a debug message."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    slice_metrics = {metric_keys.ERROR_METRIC: 'error_message'}

    actual_metrics = (
        metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
            (slice_key, slice_metrics),
            [post_export_metrics.auc(),
             post_export_metrics.auc(curve='PR')]))

    expected_metrics = metrics_for_slice_pb2.MetricsForSlice()
    expected_metrics.slice_key.CopyFrom(slicer.serialize_slice_key(slice_key))
    expected_metrics.metrics[
        metric_keys.ERROR_METRIC].debug_message = 'error_message'
    self.assertProtoEquals(expected_metrics, actual_metrics)
  def testConvertSliceMetricsToProtoStringMetrics(self):
    """Bytes-valued metrics are stored verbatim, even invalid UTF-8."""
    slice_key = _make_slice_key()
    slice_metrics = {
        'valid_ascii': b'test string',
        'valid_unicode': b'\xF0\x9F\x90\x84',  # U+1F404, COW
        'invalid_unicode': b'\xE2\x28\xA1',
    }
    expected_metrics_for_slice = metrics_for_slice_pb2.MetricsForSlice()
    expected_metrics_for_slice.slice_key.SetInParent()
    expected_metrics_for_slice.metrics[
        'valid_ascii'].bytes_value = slice_metrics['valid_ascii']
    expected_metrics_for_slice.metrics[
        'valid_unicode'].bytes_value = slice_metrics['valid_unicode']
    expected_metrics_for_slice.metrics[
        'invalid_unicode'].bytes_value = slice_metrics['invalid_unicode']

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), [])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testCombineValidationsValidationOk(self):
    """CombineValidations merges passing results and sums slicing details."""
    input_validations = [
        text_format.Parse(
            """
            validation_ok: true
            metric_validations_per_slice {
              slice_key {
                single_slice_keys {
                  column: "x"
                  bytes_value: "x1"
                }
              }
            }
            validation_details {
              slicing_details {
                slicing_spec {}
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 1
              }
            }""", validation_result_pb2.ValidationResult()),
        text_format.Parse(
            """
            validation_ok: true
            validation_details {
              slicing_details {
                slicing_spec {
                  feature_keys: ["x"]
                }
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 2
              }
            }""", validation_result_pb2.ValidationResult())
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(name='candidate'),
            config_pb2.ModelSpec(name='baseline', is_baseline=True)
        ],
        slicing_specs=[config_pb2.SlicingSpec()],
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='AUC',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=[config_pb2.SlicingSpec()],
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        lower_bound={'value': 0.7})))
                        ]),
                ],
                model_names=['candidate', 'baseline']),
        ])
    # Matching slicing specs have their num_matching_slices summed (1 + 2 = 3
    # for ["x", "y"]); non-overlapping specs are concatenated.
    expected_validation = text_format.Parse(
        """
        validation_ok: true
        metric_validations_per_slice {
          slice_key {
            single_slice_keys {
              column: "x"
              bytes_value: "x1"
            }
          }
        }
        validation_details {
          slicing_details {
            slicing_spec {}
            num_matching_slices: 1
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x", "y"]
            }
            num_matching_slices: 3
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x"]
            }
            num_matching_slices: 1
          }
        }""", validation_result_pb2.ValidationResult())

    def verify_fn(result):
      self.assertLen(result, 1)
      self.assertProtoEquals(expected_validation, result[0])

    with beam.Pipeline() as pipeline:
      result = (
          pipeline
          | 'Create' >> beam.Create(input_validations)
          | 'CombineValidations' >> beam.CombineGlobally(
              metrics_plots_and_validations_writer.CombineValidations(
                  eval_config)))

      util.assert_that(result, verify_fn)
  def testCombineValidationsMissingSlices(self):
    """CombineValidations reports configured slicing specs that matched no slice.

    The ['z'] slicing spec never appears in any input validation's details, so
    the combined result lists it under missing_slices and stays not-ok.
    """
    input_validations = [
        text_format.Parse(
            """
            validation_ok: false
            metric_validations_per_slice {
              slice_key {
                single_slice_keys {
                  column: "x"
                  bytes_value: "x1"
                }
              }
              failures {
                metric_key {
                  name: "auc"
                  model_name: "candidate"
                  is_diff: true
                }
                metric_threshold {
                  value_threshold {
                    lower_bound { value: 0.7 }
                  }
                }
                metric_value {
                  double_value { value: 0.6 }
                }
              }
            }
            validation_details {
              slicing_details {
                slicing_spec {}
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 1
              }
            }""", validation_result_pb2.ValidationResult()),
        text_format.Parse(
            """
            validation_ok: true
            validation_details {
              slicing_details {
                slicing_spec {
                  feature_keys: ["x"]
                }
                num_matching_slices: 1
              }
              slicing_details {
                slicing_spec {
                  feature_keys: ["x", "y"]
                }
                num_matching_slices: 2
              }
            }""", validation_result_pb2.ValidationResult())
    ]
    slicing_specs = [
        config_pb2.SlicingSpec(),
        config_pb2.SlicingSpec(feature_keys=['x']),
        config_pb2.SlicingSpec(feature_keys=['x', 'y']),
        config_pb2.SlicingSpec(feature_keys=['z']),
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(name='candidate'),
            config_pb2.ModelSpec(name='baseline', is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='AUC',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=slicing_specs,
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        lower_bound={'value': 0.7})))
                        ]),
                ],
                model_names=['candidate', 'baseline']),
        ])
    expected_validation = text_format.Parse(
        """
        validation_ok: false
        metric_validations_per_slice {
          slice_key {
            single_slice_keys {
              column: "x"
              bytes_value: "x1"
            }
          }
          failures {
            metric_key {
              name: "auc"
              model_name: "candidate"
              is_diff: true
            }
            metric_threshold {
              value_threshold {
                lower_bound { value: 0.7 }
              }
            }
            metric_value {
              double_value { value: 0.6 }
            }
          }
        }
        missing_slices {
          feature_keys: "z"
        }
        validation_details {
          slicing_details {
            slicing_spec {}
            num_matching_slices: 1
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x", "y"]
            }
            num_matching_slices: 3
          }
          slicing_details {
            slicing_spec {
              feature_keys: ["x"]
            }
            num_matching_slices: 1
          }
        }""", validation_result_pb2.ValidationResult())

    def verify_fn(result):
      self.assertLen(result, 1)
      self.assertProtoEquals(expected_validation, result[0])

    with beam.Pipeline() as pipeline:
      result = (
          pipeline
          | 'Create' >> beam.Create(input_validations)
          | 'CombineValidations' >> beam.CombineGlobally(
              metrics_plots_and_validations_writer.CombineValidations(
                  eval_config)))

      util.assert_that(result, verify_fn)
  def testUncertaintyValuedMetrics(self):
    """ValueWithTDistribution metrics become bounded values; NaN passes through."""
    slice_key = _make_slice_key()
    slice_metrics = {
        'one_dim':
            types.ValueWithTDistribution(2.0, 1.0, 3, 2.0),
        'nans':
            types.ValueWithTDistribution(
                float('nan'), float('nan'), -1, float('nan')),
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "one_dim"
          value {
            bounded_value {
              value {
                value: 2.0
              }
              lower_bound {
                value: -1.1824463
              }
              upper_bound {
                value: 5.1824463
              }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        metrics {
          key: "nans"
          value {
            bounded_value {
              value {
                value: nan
              }
              lower_bound {
                value: nan
              }
              upper_bound {
                value: nan
              }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), [])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceMetricsToProtoTensorValuedMetrics(self):
    """ndarray metrics of various dtypes/ranks become flattened array_values."""
    slice_key = _make_slice_key()
    slice_metrics = {
        'one_dim':
            np.array([1.0, 2.0, 3.0, 4.0], dtype=np.float32),
        'two_dims':
            np.array([['two', 'dims', 'test'], ['TWO', 'DIMS', 'TEST']]),
        'three_dims':
            np.array([[[100, 200, 300]], [[500, 600, 700]]], dtype=np.int64),
    }
    expected_metrics_for_slice = text_format.Parse(
        """
        slice_key {}
        metrics {
          key: "one_dim"
          value {
            array_value {
              data_type: FLOAT32
              shape: 4
              float32_values: [1.0, 2.0, 3.0, 4.0]
            }
          }
        }
        metrics {
          key: "two_dims"
          value {
            array_value {
              data_type: BYTES
              shape: [2, 3]
              bytes_values: ["two", "dims", "test", "TWO", "DIMS", "TEST"]
            }
          }
        }
        metrics {
          key: "three_dims"
          value {
            array_value {
              data_type: INT64
              shape: [2, 1, 3]
              int64_values: [100, 200, 300, 500, 600, 700]
            }
          }
        }
        """, metrics_for_slice_pb2.MetricsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics), [])
    self.assertProtoEquals(expected_metrics_for_slice, got)
  def testConvertSliceAttributionsToProto(self):
    """Converts per-feature attribution values to AttributionsForSlice."""
    slice_key = _make_slice_key('language', 'english', 'price', 0.3)
    slice_attributions = {
        metric_types.AttributionsKey(name='mean', output_name='output_name'): {
            'age': 0.8,
            'language': 1.2,
            'price': 2.3,
        },
    }
    expected_attributions_for_slice = text_format.Parse(
        """
        slice_key {
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        attributions_keys_and_values {
          key {
            name: "mean"
            output_name: "output_name"
          }
          values {
            key: "age"
            value: {
              double_value {
                value: 0.8
              }
            }
          }
          values {
            key: "language"
            value: {
              double_value {
                value: 1.2
              }
            }
          }
          values {
            key: "price"
            value: {
              double_value {
                value: 2.3
              }
            }
          }
        }""", metrics_for_slice_pb2.AttributionsForSlice())

    got = metrics_plots_and_validations_writer.convert_slice_attributions_to_proto(
        (slice_key, slice_attributions))
    self.assertProtoEquals(expected_attributions_for_slice, got)
_OUTPUT_FORMAT_PARAMS = [('without_output_file_format', ''),
('tfrecord_file_format', 'tfrecord'),
('parquet_file_format', 'parquet')]
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteValidationResults(self, output_file_format):
    """End-to-end: writer emits validation failures, missing (cross-)slices.

    Runs candidate (mul=0) vs. baseline (mul=1) Keras models through the full
    extract/evaluate/write pipeline with thresholds chosen to fail, then loads
    the written validation result back and checks failures and missing slices.
    """
    model_dir, baseline_dir = self._getExportDir(), self._getBaselineDir()
    eval_shared_model = self._build_keras_model(model_dir, mul=0)
    baseline_eval_shared_model = self._build_keras_model(baseline_dir, mul=1)
    validations_file = os.path.join(self._getTempDir(),
                                    constants.VALIDATIONS_KEY)
    schema = text_format.Parse(
        """
        tensor_representation_group {
          key: ""
          value {
            tensor_representation {
              key: "input_1"
              value {
                dense_tensor {
                  column_name: "input_1"
                  shape { dim { size: 1 } }
                }
              }
            }
          }
        }
        feature {
          name: "input_1"
          type: FLOAT
        }
        feature {
          name: "label"
          type: FLOAT
        }
        feature {
          name: "example_weight"
          type: FLOAT
        }
        feature {
          name: "extra_feature"
          type: BYTES
        }
        """, schema_pb2.Schema())
    tfx_io = test_util.InMemoryTFExampleRecord(
        schema=schema, raw_record_column_name=constants.ARROW_INPUT_COLUMN)
    tensor_adapter_config = tensor_adapter.TensorAdapterConfig(
        arrow_schema=tfx_io.ArrowSchema(),
        tensor_representations=tfx_io.TensorRepresentations())
    examples = [
        self._makeExample(
            input_1=0.0,
            label=1.0,
            example_weight=1.0,
            extra_feature='non_model_feature'),
        self._makeExample(
            input_1=1.0,
            label=0.0,
            example_weight=0.5,
            extra_feature='non_model_feature'),
    ]

    # 'slice_does_not_exist' is deliberately unmatched so the result reports
    # missing slices and missing cross slices.
    slicing_specs = [
        config_pb2.SlicingSpec(),
        config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
    ]
    cross_slicing_specs = [
        config_pb2.CrossSlicingSpec(
            baseline_spec=config_pb2.SlicingSpec(
                feature_keys=['slice_does_not_exist']),
            slicing_specs=[
                config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
            ])
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(
                name='candidate',
                label_key='label',
                example_weight_key='example_weight'),
            config_pb2.ModelSpec(
                name='baseline',
                label_key='label',
                example_weight_key='example_weight',
                is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        cross_slicing_specs=cross_slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(
                        class_name='WeightedExampleCount',
                        per_slice_thresholds=[
                            config_pb2.PerSliceMetricThreshold(
                                slicing_specs=slicing_specs,
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        upper_bound={'value': 1})))
                        ],
                        cross_slice_thresholds=[
                            config_pb2.CrossSliceMetricThreshold(
                                cross_slicing_specs=cross_slicing_specs,
                                threshold=config_pb2.MetricThreshold(
                                    value_threshold=config_pb2
                                    .GenericValueThreshold(
                                        upper_bound={'value': 1})))
                        ]),
                    config_pb2.MetricConfig(
                        class_name='ExampleCount',
                        threshold=config_pb2.MetricThreshold(
                            value_threshold=config_pb2.GenericValueThreshold(
                                lower_bound={'value': 10}))),
                    config_pb2.MetricConfig(
                        class_name='MeanLabel',
                        threshold=config_pb2.MetricThreshold(
                            change_threshold=config_pb2.GenericChangeThreshold(
                                direction=config_pb2.MetricDirection
                                .HIGHER_IS_BETTER,
                                relative={'value': 1},
                                absolute={'value': 1}))),
                    config_pb2.MetricConfig(
                        class_name='MeanPrediction',
                        threshold=config_pb2.MetricThreshold(
                            value_threshold=config_pb2.GenericValueThreshold(
                                upper_bound={'value': .01},
                                lower_bound={'value': -.01}),
                            change_threshold=config_pb2.GenericChangeThreshold(
                                direction=config_pb2.MetricDirection
                                .LOWER_IS_BETTER,
                                relative={'value': -.99},
                                absolute={'value': 0})))
                ],
                model_names=['candidate', 'baseline']),
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}),
    )
    slice_spec = [
        slicer.SingleSliceSpec(spec=s) for s in eval_config.slicing_specs
    ]
    eval_shared_models = {
        'candidate': eval_shared_model,
        'baseline': baseline_eval_shared_model
    }
    extractors = [
        features_extractor.FeaturesExtractor(eval_config),
        labels_extractor.LabelsExtractor(eval_config),
        example_weights_extractor.ExampleWeightsExtractor(eval_config),
        predictions_extractor.PredictionsExtractor(
            eval_shared_model=eval_shared_models,
            eval_config=eval_config,
            tensor_adapter_config=tensor_adapter_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor(slice_spec=slice_spec)
    ]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(
            eval_config=eval_config, eval_shared_model=eval_shared_models)
    ]
    output_paths = {
        constants.VALIDATIONS_KEY: validations_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format)
    ]

    with beam.Pipeline() as pipeline:

      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter

    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))

    expected_validations = [
        text_format.Parse(
            """
            metric_key {
              name: "weighted_example_count"
              model_name: "candidate"
            }
            metric_threshold {
              value_threshold {
                upper_bound {
                  value: 1.0
                }
              }
            }
            metric_value {
              double_value {
                value: 1.5
              }
            }
            """, validation_result_pb2.ValidationFailure()),
        text_format.Parse(
            """
            metric_key {
              name: "example_count"
              model_name: "candidate"
            }
            metric_threshold {
              value_threshold {
                lower_bound {
                  value: 10.0
                }
              }
            }
            metric_value {
              double_value {
                value: 2.0
              }
            }
            """, validation_result_pb2.ValidationFailure()),
        text_format.Parse(
            """
            metric_key {
              name: "mean_label"
              model_name: "candidate"
              is_diff: true
            }
            metric_threshold {
              change_threshold {
                absolute {
                  value: 1.0
                }
                relative {
                  value: 1.0
                }
                direction: HIGHER_IS_BETTER
              }
            }
            metric_value {
              double_value {
                value: 0.0
              }
            }
            """, validation_result_pb2.ValidationFailure()),
    ]
    self.assertFalse(validation_result.validation_ok)
    self.assertFalse(validation_result.missing_thresholds)
    self.assertLen(validation_result.metric_validations_per_slice, 1)
    self.assertCountEqual(
        expected_validations,
        validation_result.metric_validations_per_slice[0].failures)
    expected_missing_slices = [
        config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
    ]
    self.assertLen(validation_result.missing_slices, 1)
    self.assertCountEqual(expected_missing_slices,
                          validation_result.missing_slices)
    expected_missing_cross_slices = [
        config_pb2.CrossSlicingSpec(
            baseline_spec=config_pb2.SlicingSpec(
                feature_keys=['slice_does_not_exist']),
            slicing_specs=[
                config_pb2.SlicingSpec(feature_keys=['slice_does_not_exist'])
            ])
    ]
    self.assertLen(validation_result.missing_cross_slices, 1)
    self.assertCountEqual(expected_missing_cross_slices,
                          validation_result.missing_cross_slices)
    expected_slicing_details = [
        text_format.Parse(
            """
            slicing_spec {
            }
            num_matching_slices: 1
            """, validation_result_pb2.SlicingDetails()),
    ]
    self.assertLen(validation_result.validation_details.slicing_details, 1)
    self.assertCountEqual(expected_slicing_details,
                          validation_result.validation_details.slicing_details)
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteValidationResultsNoThresholds(self, output_file_format):
    """Without thresholds, validation fails unless rubber_stamp=True is set.

    The first run (no rubber stamp) must report missing_thresholds and not-ok;
    rerunning the same pipeline with rubber_stamp=True must report ok and set
    the rubber_stamp bit on the result.
    """
    model_dir, baseline_dir = self._getExportDir(), self._getBaselineDir()
    eval_shared_model = self._build_keras_model(model_dir, mul=0)
    baseline_eval_shared_model = self._build_keras_model(baseline_dir, mul=1)
    validations_file = os.path.join(self._getTempDir(),
                                    constants.VALIDATIONS_KEY)
    schema = text_format.Parse(
        """
        tensor_representation_group {
          key: ""
          value {
            tensor_representation {
              key: "input_1"
              value {
                dense_tensor {
                  column_name: "input_1"
                  shape { dim { size: 1 } }
                }
              }
            }
          }
        }
        feature {
          name: "input_1"
          type: FLOAT
        }
        feature {
          name: "label"
          type: FLOAT
        }
        feature {
          name: "example_weight"
          type: FLOAT
        }
        feature {
          name: "extra_feature"
          type: BYTES
        }
        """, schema_pb2.Schema())
    tfx_io = test_util.InMemoryTFExampleRecord(
        schema=schema, raw_record_column_name=constants.ARROW_INPUT_COLUMN)
    tensor_adapter_config = tensor_adapter.TensorAdapterConfig(
        arrow_schema=tfx_io.ArrowSchema(),
        tensor_representations=tfx_io.TensorRepresentations())
    examples = [
        self._makeExample(
            input_1=0.0,
            label=1.0,
            example_weight=1.0,
            extra_feature='non_model_feature'),
        self._makeExample(
            input_1=1.0,
            label=0.0,
            example_weight=0.5,
            extra_feature='non_model_feature'),
    ]

    slicing_specs = [
        config_pb2.SlicingSpec(),
    ]
    eval_config = config_pb2.EvalConfig(
        model_specs=[
            config_pb2.ModelSpec(
                name='candidate',
                label_key='label',
                example_weight_key='example_weight'),
            config_pb2.ModelSpec(
                name='baseline',
                label_key='label',
                example_weight_key='example_weight',
                is_baseline=True)
        ],
        slicing_specs=slicing_specs,
        metrics_specs=[
            config_pb2.MetricsSpec(
                metrics=[
                    config_pb2.MetricConfig(class_name='WeightedExampleCount'),
                    config_pb2.MetricConfig(class_name='ExampleCount'),
                    config_pb2.MetricConfig(class_name='MeanLabel')
                ],
                model_names=['candidate', 'baseline']),
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config_pb2.json']}),
    )
    slice_spec = [
        slicer.SingleSliceSpec(spec=s) for s in eval_config.slicing_specs
    ]
    eval_shared_models = {
        'candidate': eval_shared_model,
        'baseline': baseline_eval_shared_model
    }
    extractors = [
        features_extractor.FeaturesExtractor(eval_config),
        labels_extractor.LabelsExtractor(eval_config),
        example_weights_extractor.ExampleWeightsExtractor(eval_config),
        predictions_extractor.PredictionsExtractor(
            eval_shared_model=eval_shared_models,
            eval_config=eval_config,
            tensor_adapter_config=tensor_adapter_config),
        unbatch_extractor.UnbatchExtractor(),
        slice_key_extractor.SliceKeyExtractor(slice_spec=slice_spec)
    ]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(
            eval_config=eval_config, eval_shared_model=eval_shared_models)
    ]
    output_paths = {
        constants.VALIDATIONS_KEY: validations_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format)
    ]

    with beam.Pipeline() as pipeline:

      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter

    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))
    self.assertFalse(validation_result.validation_ok)
    self.assertTrue(validation_result.missing_thresholds)
    self.assertEmpty(validation_result.metric_validations_per_slice)

    # Second run: same pipeline, but the writer is rubber-stamped so the
    # missing thresholds are waived.
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            add_metrics_callbacks=[],
            output_file_format=output_file_format,
            rubber_stamp=True)
    ]

    with beam.Pipeline() as pipeline:

      # pylint: disable=no-value-for-parameter
      _ = (
          pipeline
          | 'Create' >> beam.Create([e.SerializeToString() for e in examples])
          | 'BatchExamples' >> tfx_io.BeamSource()
          | 'InputsToExtracts' >> model_eval_lib.BatchedInputsToExtracts()
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
      # pylint: enable=no-value-for-parameter

    validation_result = (
        metrics_plots_and_validations_writer
        .load_and_deserialize_validation_result(
            os.path.dirname(validations_file), output_file_format))
    self.assertTrue(validation_result.validation_ok)
    self.assertFalse(validation_result.missing_thresholds)
    self.assertEmpty(validation_result.metric_validations_per_slice)
    self.assertTrue(validation_result.rubber_stamp)
@parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
def testWriteMetricsAndPlots(self, output_file_format):
metrics_file = os.path.join(self._getTempDir(), 'metrics')
plots_file = os.path.join(self._getTempDir(), 'plots')
temp_eval_export_dir = os.path.join(self._getTempDir(), 'eval_export_dir')
_, eval_export_dir = (
fixed_prediction_estimator.simple_fixed_prediction_estimator(
None, temp_eval_export_dir))
eval_config = config_pb2.EvalConfig(
model_specs=[config_pb2.ModelSpec()],
options=config_pb2.Options(
disabled_outputs={'values': ['eval_config_pb2.json']}))
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir,
add_metrics_callbacks=[
post_export_metrics.example_count(),
post_export_metrics.calibration_plot_and_prediction_histogram(
num_buckets=2)
])
extractors = [
legacy_predict_extractor.PredictExtractor(
eval_shared_model, eval_config=eval_config),
unbatch_extractor.UnbatchExtractor(),
slice_key_extractor.SliceKeyExtractor()
]
evaluators = [
legacy_metrics_and_plots_evaluator.MetricsAndPlotsEvaluator(
eval_shared_model)
]
output_paths = {
constants.METRICS_KEY: metrics_file,
constants.PLOTS_KEY: plots_file
}
writers = [
metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
output_paths,
eval_config=eval_config,
add_metrics_callbacks=eval_shared_model.add_metrics_callbacks,
output_file_format=output_file_format)
]
tfx_io = raw_tf_record.RawBeamRecordTFXIO(
physical_format='inmemory',
raw_record_column_name=constants.ARROW_INPUT_COLUMN,
telemetry_descriptors=['TFMATest'])
with beam.Pipeline() as pipeline:
example1 = self._makeExample(prediction=0.0, label=1.0)
example2 = self._makeExample(prediction=1.0, label=1.0)
_ = (
pipeline
| 'Create' >> beam.Create([
example1.SerializeToString(),
example2.SerializeToString(),
])
| 'BatchExamples' >> tfx_io.BeamSource()
| 'ExtractEvaluateAndWriteResults' >>
model_eval_lib.ExtractEvaluateAndWriteResults(
eval_config=eval_config,
eval_shared_model=eval_shared_model,
extractors=extractors,
evaluators=evaluators,
writers=writers))
expected_metrics_for_slice = text_format.Parse(
"""
slice_key {}
metrics {
key: "average_loss"
value {
double_value {
value: 0.5
}
}
}
metrics {
key: "post_export_metrics/example_count"
value {
double_value {
value: 2.0
}
}
}
""", metrics_for_slice_pb2.MetricsForSlice())
metric_records = list(
metrics_plots_and_validations_writer.load_and_deserialize_metrics(
metrics_file, output_file_format))
self.assertLen(metric_records, 1, 'metrics: %s' % metric_records)
self.assertProtoEquals(expected_metrics_for_slice, metric_records[0])
expected_plots_for_slice = text_format.Parse(
"""
slice_key {}
plots {
key: "post_export_metrics"
value {
calibration_histogram_buckets {
buckets {
lower_threshold_inclusive: -inf
num_weighted_examples {}
total_weighted_label {}
total_weighted_refined_prediction {}
}
buckets {
upper_threshold_exclusive: 0.5
num_weighted_examples {
value: 1.0
}
total_weighted_label {
value: 1.0
}
total_weighted_refined_prediction {}
}
buckets {
lower_threshold_inclusive: 0.5
upper_threshold_exclusive: 1.0
num_weighted_examples {
}
total_weighted_label {}
total_weighted_refined_prediction {}
}
buckets {
lower_threshold_inclusive: 1.0
upper_threshold_exclusive: inf
num_weighted_examples {
value: 1.0
}
total_weighted_label {
value: 1.0
}
total_weighted_refined_prediction {
value: 1.0
}
}
}
}
}
""", metrics_for_slice_pb2.PlotsForSlice())
plot_records = list(
metrics_plots_and_validations_writer.load_and_deserialize_plots(
plots_file, output_file_format))
self.assertLen(plot_records, 1, 'plots: %s' % plot_records)
self.assertProtoEquals(expected_plots_for_slice, plot_records[0])
@parameterized.named_parameters(('parquet_file_format', 'parquet'))
def testLoadAndDeserializeFilteredMetricsAndPlots(self, output_file_format):
metrics_file = os.path.join(self._getTempDir(), 'metrics')
plots_file = os.path.join(self._getTempDir(), 'plots')
temp_eval_export_dir = os.path.join(self._getTempDir(), 'eval_export_dir')
_, eval_export_dir = (
fixed_prediction_estimator.simple_fixed_prediction_estimator(
None, temp_eval_export_dir))
eval_config = config_pb2.EvalConfig(
model_specs=[config_pb2.ModelSpec()],
slicing_specs=[
config_pb2.SlicingSpec(),
config_pb2.SlicingSpec(feature_keys=['prediction'])
],
options=config_pb2.Options(
disabled_outputs={'values': ['eval_config_pb2.json']}))
eval_shared_model = self.createTestEvalSharedModel(
eval_saved_model_path=eval_export_dir,
add_metrics_callbacks=[
post_export_metrics.example_count(),
post_export_metrics.calibration_plot_and_prediction_histogram(
num_buckets=2)
])
extractors = [
legacy_predict_extractor.PredictExtractor(
eval_shared_model, eval_config=eval_config),
unbatch_extractor.UnbatchExtractor(),
slice_key_extractor.SliceKeyExtractor(
eval_config=eval_config, materialize=False)
]
evaluators = [
legacy_metrics_and_plots_evaluator.MetricsAndPlotsEvaluator(
eval_shared_model)
]
output_paths = {
constants.METRICS_KEY: metrics_file,
constants.PLOTS_KEY: plots_file
}
writers = [
metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
output_paths,
eval_config=eval_config,
add_metrics_callbacks=eval_shared_model.add_metrics_callbacks,
output_file_format=output_file_format)
]
tfx_io = raw_tf_record.RawBeamRecordTFXIO(
physical_format='inmemory',
raw_record_column_name=constants.ARROW_INPUT_COLUMN,
telemetry_descriptors=['TFMATest'])
with beam.Pipeline() as pipeline:
example1 = self._makeExample(prediction=0.0, label=1.0, country='US')
example2 = self._makeExample(prediction=1.0, label=1.0, country='CA')
_ = (
pipeline
| 'Create' >> beam.Create([
example1.SerializeToString(),
example2.SerializeToString(),
])
| 'BatchExamples' >> tfx_io.BeamSource()
| 'ExtractEvaluateAndWriteResults' >>
model_eval_lib.ExtractEvaluateAndWriteResults(
eval_config=eval_config,
eval_shared_model=eval_shared_model,
extractors=extractors,
evaluators=evaluators,
writers=writers))
slice_keys_filter = [slicer.SingleSliceSpec(features=[('prediction', 0)])]
expected_metrics_for_slice = text_format.Parse(
"""
slice_key {
single_slice_keys {
column: "prediction"
float_value: 0
}
}
metrics {
key: "average_loss"
value {
double_value {
value: 1.0
}
}
}
metrics {
key: "post_export_metrics/example_count"
value {
double_value {
value: 1.0
}
}
}
""", metrics_for_slice_pb2.MetricsForSlice())
metric_records = list(
metrics_plots_and_validations_writer.load_and_deserialize_metrics(
metrics_file, output_file_format, slice_keys_filter))
self.assertLen(metric_records, 1, 'metrics: %s' % metric_records)
self.assertProtoEquals(expected_metrics_for_slice, metric_records[0])
expected_plots_for_slice = text_format.Parse(
"""
slice_key {
single_slice_keys {
column: "prediction"
float_value: 0
}
}
plots {
key: "post_export_metrics"
value {
calibration_histogram_buckets {
buckets {
lower_threshold_inclusive: -inf
num_weighted_examples {}
total_weighted_label {}
total_weighted_refined_prediction {}
}
buckets {
upper_threshold_exclusive: 0.5
num_weighted_examples {
value: 1.0
}
total_weighted_label {
value: 1.0
}
total_weighted_refined_prediction {}
}
buckets {
lower_threshold_inclusive: 0.5
upper_threshold_exclusive: 1.0
num_weighted_examples {
}
total_weighted_label {}
total_weighted_refined_prediction {}
}
buckets {
lower_threshold_inclusive: 1.0
upper_threshold_exclusive: inf
num_weighted_examples {
value: 0.0
}
total_weighted_label {
value: 0.0
}
total_weighted_refined_prediction {
value: 0.0
}
}
}
}
}
""", metrics_for_slice_pb2.PlotsForSlice())
plot_records = list(
metrics_plots_and_validations_writer.load_and_deserialize_plots(
plots_file, output_file_format, slice_keys_filter))
self.assertLen(plot_records, 1, 'plots: %s' % plot_records)
self.assertProtoEquals(expected_plots_for_slice, plot_records[0])
  @parameterized.named_parameters(_OUTPUT_FORMAT_PARAMS)
  def testWriteAttributions(self, output_file_format):
    """Writes attribution results and verifies they can be reloaded.

    Three examples with per-feature attribution values are evaluated with
    the TotalAttributions metric; the writer's serialized output is then
    deserialized and compared against the expected summed proto.

    Args:
      output_file_format: serialization format under test (parameterized).
    """
    attributions_file = os.path.join(self._getTempDir(), 'attributions')
    eval_config = config_pb2.EvalConfig(
        model_specs=[config_pb2.ModelSpec()],
        metrics_specs=[
            config_pb2.MetricsSpec(metrics=[
                config_pb2.MetricConfig(class_name=attributions
                                        .TotalAttributions().__class__.__name__)
            ])
        ],
        options=config_pb2.Options(
            disabled_outputs={'values': ['eval_config.json']}))
    extractors = [slice_key_extractor.SliceKeyExtractor()]
    evaluators = [
        metrics_plots_and_validations_evaluator
        .MetricsPlotsAndValidationsEvaluator(eval_config=eval_config)
    ]
    output_paths = {
        constants.ATTRIBUTIONS_KEY: attributions_file,
    }
    writers = [
        metrics_plots_and_validations_writer.MetricsPlotsAndValidationsWriter(
            output_paths,
            eval_config=eval_config,
            output_file_format=output_file_format)
    ]
    # Attribution values may be plain python floats or numpy arrays; both
    # forms must be accepted and aggregated the same way.
    example1 = {
        'features': {},
        'attributions': {
            'feature1': 1.1,
            'feature2': 1.2
        }
    }
    example2 = {
        'features': {},
        'attributions': {
            'feature1': 2.1,
            'feature2': 2.2
        }
    }
    example3 = {
        'features': {},
        'attributions': {
            'feature1': np.array([3.1]),
            'feature2': np.array([3.2])
        }
    }
    with beam.Pipeline() as pipeline:
      _ = (
          pipeline
          | 'Create' >> beam.Create([example1, example2, example3])
          | 'ExtractEvaluate' >> model_eval_lib.ExtractAndEvaluate(
              extractors=extractors, evaluators=evaluators)
          | 'WriteResults' >> model_eval_lib.WriteResults(writers=writers))
    # Expected totals: feature1 = 1.1 + 2.1 + 3.1, feature2 = 1.2 + 2.2 + 3.2.
    expected_attributions_for_slice = text_format.Parse(
        """
        slice_key {}
        attributions_keys_and_values {
          key {
            name: "total_attributions"
          }
          values {
            key: "feature1"
            value: {
              double_value {
                value: 6.3
              }
            }
          }
          values {
            key: "feature2"
            value: {
              double_value {
                value: 6.6
              }
            }
          }
        }""", metrics_for_slice_pb2.AttributionsForSlice())
    attribution_records = list(
        metrics_plots_and_validations_writer.load_and_deserialize_attributions(
            attributions_file, output_file_format))
    self.assertLen(attribution_records, 1)
    self.assertProtoEquals(expected_attributions_for_slice,
                           attribution_records[0])
if __name__ == '__main__':
  # The tests assume TF2 semantics even when run under a TF1-style binary.
  tf.compat.v1.enable_v2_behavior()
  tf.test.main()
| true | true |
1c383a645bd26f9fa6a5955ec222204a3829f31e | 5,276 | py | Python | tests/filters/path_filter.py | pyllyukko/plaso | 7533db2d1035ca71d264d6281ebd5db2d073c587 | [
"Apache-2.0"
] | 1,253 | 2015-01-02T13:58:02.000Z | 2022-03-31T08:43:39.000Z | tests/filters/path_filter.py | pyllyukko/plaso | 7533db2d1035ca71d264d6281ebd5db2d073c587 | [
"Apache-2.0"
] | 3,388 | 2015-01-02T11:17:58.000Z | 2022-03-30T10:21:45.000Z | tests/filters/path_filter.py | pyllyukko/plaso | 7533db2d1035ca71d264d6281ebd5db2d073c587 | [
"Apache-2.0"
] | 376 | 2015-01-20T07:04:54.000Z | 2022-03-04T23:53:00.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the scan tree-based path filter."""
import unittest
from plaso.filters import path_filter
from tests.filters import test_lib
class PathFilterScanTreeTest(test_lib.FilterTestCase):
  """Tests for the path filter scan tree."""

  # pylint: disable=protected-access

  def testInitialize(self):
    """Tests the initialize function."""
    # An empty filter list should not produce a scan tree root node.
    empty_scan_tree = path_filter.PathFilterScanTree([])
    self.assertIsNone(empty_scan_tree._root_node)

    paths = [
        'HKEY_CURRENT_USER\\Software\\WinRAR\\ArcHistory',
        'HKEY_CURRENT_USER\\Software\\WinRAR\\DialogEditHistory\\ArcName',
        'HKEY_CURRENT_USER\\Software\\WinRAR\\DialogEditHistory\\ExtrPath',
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{F4E57C4B-2036-45F0-A9AB-443BCFE33D9F}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{CEBFF5CD-ACE2-4F4F-9178-9926F41749EA}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{CAA59E3C-4792-41A5-9909-6A6A8D32490E}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{B267E3AD-A825-4A09-82B9-EEC22AA3B847}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{A3D53349-6E61-4557-8FC7-0028EDCEEBF6}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{9E04CAB2-CC14-11DF-BB8C-A2F1DED72085}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{75048700-EF1F-11D0-9888-006097DEACF9}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{5E6AB780-7743-11CF-A12B-00AA004AE837}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{0D6D4F41-2994-4BA0-8FEF-620E43CD2812}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\UserAssist\\{BCB48336-4DDD-48FF-BB0B-D3190DACB3E2}'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Internet Explorer\\'
         'TypedURLs'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Explorer\\TypedPaths'),
        ('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
         'Session Manager\\AppCompatibility'),
        ('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
         'Session Manager\\AppCompatCache'),
        ('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\'
         'CurrentVersion'),
        'HKEY_LOCAL_MACHINE\\SAM\\Domains\\Account\\Users',
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Internet Settings\\Lockdown_Zones'),
        ('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Internet Settings\\Zones'),
        ('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Internet Settings\\Lockdown_Zones'),
        ('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
         'Internet Settings\\Zones')]

    # Each tuple is: (path, path segment separator, expected match with
    # case-sensitive matching, expected match with case-insensitive matching).
    check_path_tests = [
        ('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows',
         '\\', False, False),
        (('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
          'Session Manager\\AppCompatCache'), '\\', True, True),
        (('HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\'
          'Session Manager\\AppCompatCache'), '\\', False, True),
        (('HKEY_LOCAL_MACHINE/System/CurrentControlSet/Control/'
          'Session Manager/AppCompatCache'), '/', True, True)]

    scan_tree = path_filter.PathFilterScanTree(
        paths, path_segment_separator='\\')
    self.assertIsNotNone(scan_tree._root_node)

    for path, separator, case_sensitive_match, _ in check_path_tests:
      result = scan_tree.CheckPath(path, path_segment_separator=separator)
      self.assertEqual(bool(result), case_sensitive_match)

    scan_tree = path_filter.PathFilterScanTree(
        paths, case_sensitive=False, path_segment_separator='\\')
    self.assertIsNotNone(scan_tree._root_node)

    for path, separator, _, case_insensitive_match in check_path_tests:
      result = scan_tree.CheckPath(path, path_segment_separator=separator)
      self.assertEqual(bool(result), case_insensitive_match)


if __name__ == '__main__':
  unittest.main()
| 44.711864 | 77 | 0.69674 |
import unittest
from plaso.filters import path_filter
from tests.filters import test_lib
class PathFilterScanTreeTest(test_lib.FilterTestCase):
def testInitialize(self):
scan_tree = path_filter.PathFilterScanTree([])
self.assertIsNone(scan_tree._root_node)
paths = [
'HKEY_CURRENT_USER\\Software\\WinRAR\\ArcHistory',
'HKEY_CURRENT_USER\\Software\\WinRAR\\DialogEditHistory\\ArcName',
'HKEY_CURRENT_USER\\Software\\WinRAR\\DialogEditHistory\\ExtrPath',
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{FA99DFC7-6AC2-453A-A5E2-5E2AFF4507BD}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{F4E57C4B-2036-45F0-A9AB-443BCFE33D9F}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{F2A1CB5A-E3CC-4A2E-AF9D-505A7009D442}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{CEBFF5CD-ACE2-4F4F-9178-9926F41749EA}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{CAA59E3C-4792-41A5-9909-6A6A8D32490E}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{B267E3AD-A825-4A09-82B9-EEC22AA3B847}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{A3D53349-6E61-4557-8FC7-0028EDCEEBF6}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{9E04CAB2-CC14-11DF-BB8C-A2F1DED72085}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{75048700-EF1F-11D0-9888-006097DEACF9}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{5E6AB780-7743-11CF-A12B-00AA004AE837}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{0D6D4F41-2994-4BA0-8FEF-620E43CD2812}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist\\{BCB48336-4DDD-48FF-BB0B-D3190DACB3E2}'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Internet Explorer\\'
'TypedURLs'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\TypedPaths'),
('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatibility'),
('HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatCache'),
('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows NT\\'
'CurrentVersion'),
'HKEY_LOCAL_MACHINE\\SAM\\Domains\\Account\\Users',
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Internet Settings\\Lockdown_Zones'),
('HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Internet Settings\\Zones'),
('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Internet Settings\\Lockdown_Zones'),
('HKEY_LOCAL_MACHINE\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Internet Settings\\Zones'),
]
scan_tree = path_filter.PathFilterScanTree(
paths, path_segment_separator='\\')
self.assertIsNotNone(scan_tree._root_node)
path = 'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows'
self.assertFalse(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatCache')
self.assertTrue(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatCache')
self.assertFalse(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE/System/CurrentControlSet/Control/'
'Session Manager/AppCompatCache')
self.assertTrue(scan_tree.CheckPath(path, path_segment_separator='/'))
scan_tree = path_filter.PathFilterScanTree(
paths, case_sensitive=False, path_segment_separator='\\')
self.assertIsNotNone(scan_tree._root_node)
path = 'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows'
self.assertFalse(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatCache')
self.assertTrue(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE\\SYSTEM\\CurrentControlSet\\Control\\'
'Session Manager\\AppCompatCache')
self.assertTrue(scan_tree.CheckPath(path))
path = (
'HKEY_LOCAL_MACHINE/System/CurrentControlSet/Control/'
'Session Manager/AppCompatCache')
self.assertTrue(scan_tree.CheckPath(path, path_segment_separator='/'))
if __name__ == '__main__':
unittest.main()
| true | true |
1c383c031f1ec6570de8548f7cf0b056ff16dca3 | 1,000 | py | Python | face.py | yahya-idriss/Python-Personal-assistant | 97ebac124441f3fbcb6d74a4e33da14e4632afc0 | [
"MIT"
] | null | null | null | face.py | yahya-idriss/Python-Personal-assistant | 97ebac124441f3fbcb6d74a4e33da14e4632afc0 | [
"MIT"
] | null | null | null | face.py | yahya-idriss/Python-Personal-assistant | 97ebac124441f3fbcb6d74a4e33da14e4632afc0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import cv2
import numpy as np
import sqlite3
from function import function
class face_detect():
    """Captures face image samples from the default webcam.

    Detects frontal faces with a Haar cascade and saves cropped grayscale
    face images under ``dataSet/`` for later recognizer training.
    """

    def __init__(self):
        """Opens the capture device and loads the face detector."""
        self.cam = cv2.VideoCapture(0)
        self.detector = cv2.CascadeClassifier(
            "haarcascade_frontalface_default.xml")
        server = function()
        # Continue user numbering after the last id known to the backend.
        self.id = server.get_last_id()

    def new(self):
        """Collects face samples for a new user and writes them to dataSet/.

        Stops once 21 samples have been saved or when 'q' is pressed.
        """
        Id = str(self.id + 1)
        sampleNum = 0
        while True:
            ret, img = self.cam.read()
            if not ret:
                # Camera read failed (device busy or disconnected); stop
                # instead of crashing in cvtColor on a None frame.
                break
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            faces = self.detector.detectMultiScale(gray, 1.3, 5)
            for (x, y, w, h) in faces:
                cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
                sampleNum = sampleNum + 1
                # Save the cropped grayscale face region for training.
                cv2.imwrite(
                    "dataSet/User." + Id + '.' + str(sampleNum) + ".jpg",
                    gray[y:y + h, x:x + w])
            # NOTE(review): imshow/waitKey assumed to sit at the while-loop
            # level (original indentation was ambiguous) -- confirm.
            cv2.imshow('frame', img)
            if cv2.waitKey(100) & 0xFF == ord('q'):
                break
            elif sampleNum > 20:
                break
        self.cam.release()
        cv2.destroyAllWindows()
| 25 | 94 | 0.583 |
import os
import cv2
import numpy as np
import sqlite3
from function import function
class face_detect():
def __init__(self):
self.cam = cv2.VideoCapture(0)
self.detector=cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
server = function()
self.id = server.get_last_id()
def new(self):
Id=str(self.id+1)
sampleNum=0
while(True):
ret, img = self.cam.read()
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = self.detector.detectMultiScale(gray, 1.3, 5)
for (x,y,w,h) in faces:
cv2.rectangle(img,(x,y),(x+w,y+h),(255,0,0),2)
sampleNum=sampleNum+1
cv2.imwrite("dataSet/User."+Id +'.'+ str(sampleNum) + ".jpg", gray[y:y+h,x:x+w])
cv2.imshow('frame',img)
if cv2.waitKey(100) & 0xFF == ord('q'):
break
elif sampleNum>20:
break
self.cam.release()
cv2.destroyAllWindows()
| true | true |
1c383c2418e352a46d37f1383de7f35a738196ea | 190 | py | Python | grid_tuple.py | ErnestoPena/Game-Of-Life.-Python | 046bca4ae1c69e08efed4f45057b097653410ec8 | [
"MIT"
] | null | null | null | grid_tuple.py | ErnestoPena/Game-Of-Life.-Python | 046bca4ae1c69e08efed4f45057b097653410ec8 | [
"MIT"
] | null | null | null | grid_tuple.py | ErnestoPena/Game-Of-Life.-Python | 046bca4ae1c69e08efed4f45057b097653410ec8 | [
"MIT"
] | null | null | null | import random
def construct_main_tuple(columns=100, rows=100, cell_size=10):
    """Builds the flat list of grid cells for the game board.

    Each cell is ``[pixel_x, pixel_y, alive]`` where ``alive`` starts False.
    Cells are ordered row by row (y-major), matching the original layout.

    Args:
        columns: Number of cells per row (default 100, as before).
        rows: Number of rows (default 100, as before).
        cell_size: Pixel size of one square cell (default 10, as before).

    Returns:
        A list of ``columns * rows`` three-element lists.
    """
    return [[x * cell_size, y * cell_size, False]
            for y in range(rows)
            for x in range(columns)]
def construct_main_tuple():
grid_list = []
for y in range(0,100):
for x in range(0,100):
grid_list.append([x*10, y*10, False])
return grid_list | true | true |
1c383d4343abe437b12bcd988019af09a77e5979 | 3,664 | py | Python | tests/components/garmin_connect/test_config_flow.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 4 | 2020-07-29T17:47:10.000Z | 2020-09-16T13:39:13.000Z | tests/components/garmin_connect/test_config_flow.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 6 | 2020-11-08T19:40:10.000Z | 2022-03-01T11:11:07.000Z | tests/components/garmin_connect/test_config_flow.py | klauern/home-assistant-core | c18ba6aec0627e6afb6442c678edb5ff2bb17db6 | [
"Apache-2.0"
] | 5 | 2020-03-29T00:29:13.000Z | 2021-09-06T20:58:40.000Z | """Test the Garmin Connect config flow."""
from garminconnect import (
GarminConnectAuthenticationError,
GarminConnectConnectionError,
GarminConnectTooManyRequestsError,
)
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.garmin_connect.const import DOMAIN
from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
# Shared user-step input: account display name plus Garmin Connect credentials.
MOCK_CONF = {
    CONF_ID: "First Lastname",
    CONF_USERNAME: "my@email.address",
    CONF_PASSWORD: "mypassw0rd",
}
@pytest.fixture(name="mock_garmin_connect")
def mock_garmin():
    """Patch the Garmin client used by the config flow.

    Yields the mocked client instance; ``get_full_name`` returns the account
    id so the flow can derive the config entry's unique id from it.
    """
    with patch("homeassistant.components.garmin_connect.config_flow.Garmin",) as garmin:
        garmin.return_value.get_full_name.return_value = MOCK_CONF[CONF_ID]
        yield garmin.return_value
async def test_show_form(hass):
    """The user step renders the credentials form when no input is given."""
    outcome = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "user"}
    )
    assert outcome["step_id"] == "user"
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM


async def _start_user_flow(hass):
    """Start a user-initiated config flow with the mocked credentials."""
    return await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": "user"}, data=MOCK_CONF
    )


async def test_step_user(hass, mock_garmin_connect):
    """A valid user flow finishes and creates an entry with the given data."""
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert outcome["data"] == MOCK_CONF


async def test_connection_error(hass, mock_garmin_connect):
    """A connection failure during login re-shows the form with an error."""
    mock_garmin_connect.login.side_effect = GarminConnectConnectionError("errormsg")
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert outcome["errors"] == {"base": "cannot_connect"}


async def test_authentication_error(hass, mock_garmin_connect):
    """Bad credentials re-show the form with an invalid_auth error."""
    mock_garmin_connect.login.side_effect = GarminConnectAuthenticationError("errormsg")
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert outcome["errors"] == {"base": "invalid_auth"}


async def test_toomanyrequest_error(hass, mock_garmin_connect):
    """Rate limiting by Garmin re-shows the form with a dedicated error."""
    mock_garmin_connect.login.side_effect = GarminConnectTooManyRequestsError(
        "errormsg"
    )
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert outcome["errors"] == {"base": "too_many_requests"}


async def test_unknown_error(hass, mock_garmin_connect):
    """Any unexpected exception maps to the generic unknown error."""
    mock_garmin_connect.login.side_effect = Exception
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert outcome["errors"] == {"base": "unknown"}


async def test_abort_if_already_setup(hass, mock_garmin_connect):
    """A second flow for an already-configured account aborts."""
    entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_ID])
    entry.add_to_hass(hass)
    outcome = await _start_user_flow(hass)
    assert outcome["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert outcome["reason"] == "already_configured"
| 36.64 | 88 | 0.728985 | from garminconnect import (
GarminConnectAuthenticationError,
GarminConnectConnectionError,
GarminConnectTooManyRequestsError,
)
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.garmin_connect.const import DOMAIN
from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
MOCK_CONF = {
CONF_ID: "First Lastname",
CONF_USERNAME: "my@email.address",
CONF_PASSWORD: "mypassw0rd",
}
@pytest.fixture(name="mock_garmin_connect")
def mock_garmin():
with patch("homeassistant.components.garmin_connect.config_flow.Garmin",) as garmin:
garmin.return_value.get_full_name.return_value = MOCK_CONF[CONF_ID]
yield garmin.return_value
async def test_show_form(hass):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
async def test_step_user(hass, mock_garmin_connect):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"] == MOCK_CONF
async def test_connection_error(hass, mock_garmin_connect):
mock_garmin_connect.login.side_effect = GarminConnectConnectionError("errormsg")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "cannot_connect"}
async def test_authentication_error(hass, mock_garmin_connect):
mock_garmin_connect.login.side_effect = GarminConnectAuthenticationError("errormsg")
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
async def test_toomanyrequest_error(hass, mock_garmin_connect):
mock_garmin_connect.login.side_effect = GarminConnectTooManyRequestsError(
"errormsg"
)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "too_many_requests"}
async def test_unknown_error(hass, mock_garmin_connect):
mock_garmin_connect.login.side_effect = Exception
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "unknown"}
async def test_abort_if_already_setup(hass, mock_garmin_connect):
entry = MockConfigEntry(domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_ID])
entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": "user"}, data=MOCK_CONF
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
| true | true |
1c383d5095d9c1f64070cb21511e8a323a60d196 | 1,832 | py | Python | ml_service/pipelines/diabetes_regression_build_train_pipeline_with_r.py | tspen/MLOpsPython | dee07128e92c37ed6105938825530277672c8339 | [
"MIT"
] | null | null | null | ml_service/pipelines/diabetes_regression_build_train_pipeline_with_r.py | tspen/MLOpsPython | dee07128e92c37ed6105938825530277672c8339 | [
"MIT"
] | null | null | null | ml_service/pipelines/diabetes_regression_build_train_pipeline_with_r.py | tspen/MLOpsPython | dee07128e92c37ed6105938825530277672c8339 | [
"MIT"
] | null | null | null | from azureml.pipeline.steps import PythonScriptStep
from azureml.pipeline.core import Pipeline
from azureml.core import Workspace
from azureml.core.runconfig import RunConfiguration, CondaDependencies
from ml_service.util.attach_compute import get_compute
from ml_service.util.env_variables import Env
def main():
    """Builds and publishes the Azure ML training pipeline (R training step)."""
    env_vars = Env()

    # Resolve the Azure ML workspace from environment configuration.
    workspace = Workspace.get(
        name=env_vars.workspace_name,
        subscription_id=env_vars.subscription_id,
        resource_group=env_vars.resource_group
    )
    print("get_workspace:")
    print(workspace)

    # Attach (or create) the compute cluster used for training.
    compute_target = get_compute(
        workspace,
        env_vars.compute_name,
        env_vars.vm_size)
    if compute_target is not None:
        print("aml_compute:")
        print(compute_target)

    # Build the run configuration from the conda dependency file and run
    # the step inside a Docker container.
    dependencies = CondaDependencies(
        "diabetes_regression/training_dependencies.yml")
    run_config = RunConfiguration(conda_dependencies=dependencies)
    run_config.environment.docker.enabled = True

    # Single pipeline step that invokes the R-based training script.
    train_step = PythonScriptStep(
        name="Train Model",
        script_name="train_with_r.py",
        compute_target=compute_target,
        source_directory="diabetes_regression/training/R",
        runconfig=run_config,
        allow_reuse=False,
    )
    print("Step Train created")

    train_pipeline = Pipeline(workspace=workspace, steps=[train_step])
    train_pipeline.validate()
    published_pipeline = train_pipeline.publish(
        name=env_vars.pipeline_name,
        description="Model training/retraining pipeline",
        version=env_vars.build_id
    )
    print(f'Published pipeline: {published_pipeline.name}')
    print(f'for build {published_pipeline.version}')


if __name__ == '__main__':
    main()
| 30.533333 | 70 | 0.718341 | from azureml.pipeline.steps import PythonScriptStep
from azureml.pipeline.core import Pipeline
from azureml.core import Workspace
from azureml.core.runconfig import RunConfiguration, CondaDependencies
from ml_service.util.attach_compute import get_compute
from ml_service.util.env_variables import Env
def main():
    """Build and publish the R-based training pipeline for the diabetes model.

    Reads workspace/compute settings from environment variables, creates a
    single R training step, then validates and publishes the pipeline.
    """
    e = Env()
    # Get Azure machine learning workspace from the configured subscription.
    aml_workspace = Workspace.get(
        name=e.workspace_name,
        subscription_id=e.subscription_id,
        resource_group=e.resource_group
    )
    print("get_workspace:")
    print(aml_workspace)
    # Get (or provision) the Azure machine learning compute cluster.
    aml_compute = get_compute(
        aml_workspace,
        e.compute_name,
        e.vm_size)
    if aml_compute is not None:
        print("aml_compute:")
        print(aml_compute)
    # Run configuration: conda dependencies executed inside Docker.
    conda_deps_file = "diabetes_regression/training_dependencies.yml"
    conda_deps = CondaDependencies(conda_deps_file)
    run_config = RunConfiguration(conda_dependencies=conda_deps)
    run_config.environment.docker.enabled = True
    # Single step that launches the R training script on the cluster.
    train_step = PythonScriptStep(
        name="Train Model",
        script_name="train_with_r.py",
        compute_target=aml_compute,
        source_directory="diabetes_regression/training/R",
        runconfig=run_config,
        allow_reuse=False,
    )
    print("Step Train created")
    steps = [train_step]
    # Validate the assembled pipeline, then publish it under the build id.
    train_pipeline = Pipeline(workspace=aml_workspace, steps=steps)
    train_pipeline.validate()
    published_pipeline = train_pipeline.publish(
        name=e.pipeline_name,
        description="Model training/retraining pipeline",
        version=e.build_id
    )
    print(f'Published pipeline: {published_pipeline.name}')
    print(f'for build {published_pipeline.version}')
if __name__ == '__main__':
    main()
| true | true |
1c383e37261953fcc99197b12f66d9f5d4fdffcd | 13,484 | py | Python | manila/tests/share/drivers/quobyte/test_quobyte.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 1 | 2015-05-28T22:28:08.000Z | 2015-05-28T22:28:08.000Z | manila/tests/share/drivers/quobyte/test_quobyte.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 5 | 2015-08-13T15:17:28.000Z | 2016-08-02T02:55:01.000Z | manila/tests/share/drivers/quobyte/test_quobyte.py | nidhimittalhada/access_group_repo | 62f3365bc5fb728fcca692a9b3977690fabcd78f | [
"Apache-2.0"
] | 2 | 2015-08-29T08:19:58.000Z | 2016-08-02T02:46:10.000Z | # Copyright (c) 2015 Quobyte, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
import six
from manila import context
from manila import exception
from manila.share import configuration as config
from manila.share import driver
from manila.share.drivers.quobyte import jsonrpc
from manila.share.drivers.quobyte import quobyte
from manila import test
from manila.tests import fake_share
CONF = cfg.CONF
def fake_rpc_handler(name, *args):
    """Stand-in for the Quobyte JSON-RPC ``call`` used by the driver tests.

    Returns a canned response for the RPC method *name*; any method not
    listed below (including 'resolveVolumeName') yields None, matching the
    original if/elif fall-through behaviour.
    """
    canned_responses = {
        'createVolume': {'volume_uuid': 'voluuid'},
        'exportVolume': {'nfs_server_ip': 'fake_location',
                         'nfs_export_path': '/fake_share'},
    }
    return canned_responses.get(name)
class QuobyteShareDriverTestCase(test.TestCase):
    """Tests QuobyteShareDriver."""

    def setUp(self):
        # Common fixture: a driver with a mocked JSON-RPC channel plus a
        # fake NFS share and a fake access rule used by most tests.
        super(QuobyteShareDriverTestCase, self).setUp()
        self._context = context.get_admin_context()
        CONF.set_default('driver_handles_share_servers', False)
        self.fake_conf = config.Configuration(None)
        self._driver = quobyte.QuobyteShareDriver(configuration=self.fake_conf)
        self._driver.rpc = mock.Mock()
        self.share = fake_share.fake_share(share_proto='NFS')
        self.access = fake_share.fake_access()

    @mock.patch('manila.share.drivers.quobyte.jsonrpc.JsonRpc', mock.Mock())
    def test_do_setup_success(self):
        """do_setup probes the backend with a getInformation RPC."""
        self._driver.rpc.call = mock.Mock(return_value=None)
        self._driver.do_setup(self._context)
        self._driver.rpc.call.assert_called_with('getInformation', {})

    @mock.patch('manila.share.drivers.quobyte.jsonrpc.JsonRpc.__init__',
                mock.Mock(return_value=None))
    @mock.patch.object(jsonrpc.JsonRpc, 'call',
                       side_effect=exception.QBRpcException)
    def test_do_setup_failure(self, mock_call):
        """An RPC failure during setup surfaces as QBException."""
        self.assertRaises(exception.QBException,
                          self._driver.do_setup, self._context)

    def test_create_share_new_volume(self):
        """Creating a share for an unknown volume creates + exports it."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        result = self._driver.create_share(self._context, self.share)
        self.assertEqual(self.share['export_location'], result)
        self._driver.rpc.call.assert_has_calls([
            mock.call('createVolume', dict(
                name=self.share['name'],
                tenant_domain=self.share['project_id'],
                root_user_id=self.fake_conf.quobyte_default_volume_user,
                root_group_id=self.fake_conf.quobyte_default_volume_group,
                configuration_name=self.fake_conf.quobyte_volume_configuration
            )),
            mock.call('exportVolume',
                      dict(protocol='NFS', volume_uuid='voluuid'))])

    def test_create_share_existing_volume(self):
        """Creating a share for an existing volume only (re-)exports it."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self._driver.create_share(self._context, self.share)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', dict(protocol='NFS', volume_uuid='voluuid'))

    def test_create_share_wrong_protocol(self):
        """Only NFS is supported; other protocols raise QBException."""
        share = {'share_proto': 'WRONG_PROTOCOL'}
        self.assertRaises(exception.QBException,
                          self._driver.create_share,
                          context=None,
                          share=share)

    def test_delete_share_existing_volume(self):
        """With quobyte_delete_shares=True the volume itself is deleted."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {}

        self._driver.configuration.quobyte_delete_shares = True
        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        self._driver.rpc.call.assert_has_calls([
            mock.call('resolveVolumeName',
                      {'volume_name': 'fakename',
                       'tenant_domain': 'fake_project_uuid'}),
            mock.call('deleteVolume', {'volume_uuid': 'voluuid'}),
            mock.call('exportVolume', {'volume_uuid': 'voluuid',
                                       'remove_export': True})])

    def test_delete_share_existing_volume_disabled(self):
        """With deletion disabled only the export is removed."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {}

        CONF.set_default('quobyte_delete_shares', False)
        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'remove_export': True})

    @mock.patch.object(quobyte.LOG, 'warning')
    def test_delete_share_nonexisting_volume(self, mock_warning):
        """Deleting a share whose volume is gone only logs a warning."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return None

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        mock_warning.assert_called_with(
            'No volume found for share fake_project_uuid/fakename')

    def test_allow_access(self):
        """Allowing rw ip access adds the ip to the volume export."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.allow_access(self._context, self.share, self.access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'read_only': False,
                             'add_allow_ip': '10.0.0.1'})

    def test_allow_ro_access(self):
        """Allowing read-only ip access sets read_only on the export."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        ro_access = fake_share.fake_access(access_level='ro')
        self._driver.allow_access(self._context, self.share, ro_access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'read_only': True,
                             'add_allow_ip': '10.0.0.1'})

    def test_allow_access_nonip(self):
        """Non-ip access types are rejected with InvalidShareAccess."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.access = fake_share.fake_access(**{"access_type":
                                                "non_existant_access_type"})
        self.assertRaises(exception.InvalidShareAccess,
                          self._driver.allow_access,
                          self._context, self.share, self.access)

    def test_deny_access(self):
        """Denying ip access removes the ip from the volume export."""
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.deny_access(self._context, self.share, self.access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume',
            {'volume_uuid': 'voluuid', 'remove_allow_ip': '10.0.0.1'})

    @mock.patch.object(quobyte.LOG, 'debug')
    def test_deny_access_nonip(self, mock_debug):
        """Denying a non-ip rule is silently ignored with a debug log."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.access = fake_share.fake_access(
            access_type="non_existant_access_type")
        self._driver.deny_access(self._context, self.share, self.access)
        mock_debug.assert_called_with(
            'Quobyte driver only supports ip access control. '
            'Ignoring deny access call for %s , %s',
            'fakename', 'fake_project_uuid')

    def test_resolve_volume_name(self):
        """_resolve_volume_name issues a resolveVolumeName RPC."""
        self._driver.rpc.call = mock.Mock(
            return_value={'volume_uuid': 'fake_uuid'})
        self._driver._resolve_volume_name('fake_vol_name', 'fake_domain_name')
        self._driver.rpc.call.assert_called_with(
            'resolveVolumeName',
            {'volume_name': 'fake_vol_name',
             'tenant_domain': 'fake_domain_name'})

    def test_resolve_volume_name_NOENT(self):
        """A missing volume resolves to None rather than raising."""
        self._driver.rpc.call = mock.Mock(
            return_value=None)
        self.assertIsNone(
            self._driver._resolve_volume_name('fake_vol_name',
                                              'fake_domain_name'))

    def test_resolve_volume_name_other_error(self):
        """RPC errors other than not-found propagate as QBRpcException."""
        self._driver.rpc.call = mock.Mock(
            side_effect=exception.QBRpcException(
                result='fubar',
                qbcode=666))
        self.assertRaises(exception.QBRpcException,
                          self._driver._resolve_volume_name,
                          volume_name='fake_vol_name',
                          tenant_domain='fake_domain_name')

    @mock.patch.object(driver.ShareDriver, '_update_share_stats')
    def test_update_share_stats(self, mock_uss):
        """Capacity numbers from the backend are forwarded to base stats."""
        self._driver._get_capacities = mock.Mock(return_value=[42, 23])
        self._driver._update_share_stats()
        mock_uss.assert_called_once_with(
            dict(storage_protocol='NFS',
                 vendor_name='Quobyte',
                 share_backend_name=self._driver.backend_name,
                 driver_version=self._driver.DRIVER_VERSION,
                 total_capacity_gb=42,
                 free_capacity_gb=23,
                 reserved_percentage=0))

    def test_get_capacities_gb(self):
        """Byte counts reported by the backend are converted to GiB."""
        capval = 42115548133
        useval = 19695128917
        self._driver.rpc.call = mock.Mock(
            return_value={'total_logical_capacity': six.text_type(capval),
                          'total_logical_usage': six.text_type(useval)})
        self.assertEqual((39.223160718, 20.880642548),
                         self._driver._get_capacities())

    @mock.patch.object(quobyte.QuobyteShareDriver,
                       "_resolve_volume_name",
                       return_value="fake_uuid")
    def test_ensure_share(self, mock_qb_resolve_volname):
        """ensure_share re-exports an existing volume and returns location."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        result = self._driver.ensure_share(self._context, self.share, None)
        self.assertEqual(self.share["export_location"], result)
        (mock_qb_resolve_volname.
         assert_called_once_with(self.share['name'],
                                 self.share['project_id']))
        self._driver.rpc.call.assert_has_calls([
            mock.call('exportVolume', dict(
                volume_uuid="fake_uuid",
                protocol='NFS'
            ))])

    @mock.patch.object(quobyte.QuobyteShareDriver,
                       "_resolve_volume_name",
                       return_value=None)
    def test_ensure_deleted_share(self, mock_qb_resolve_volname):
        """ensure_share on a vanished volume raises ShareResourceNotFound."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.assertRaises(exception.ShareResourceNotFound,
                          self._driver.ensure_share,
                          self._context, self.share, None)
        (mock_qb_resolve_volname.
         assert_called_once_with(self.share['name'],
                                 self.share['project_id']))

    @mock.patch.object(quobyte.QuobyteShareDriver, "_resize_share")
    def test_extend_share(self, mock_qsd_resize_share):
        """extend_share delegates to _resize_share with the new size."""
        self._driver.extend_share(ext_share=self.share,
                                  ext_size=2,
                                  share_server=None)
        mock_qsd_resize_share.assert_called_once_with(share=self.share,
                                                      new_size=2)

    def test_resize_share(self):
        """_resize_share sets a volume quota via the setQuota RPC."""
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self._driver._resize_share(share=self.share, new_size=7)
        self._driver.rpc.call.assert_has_calls([
            mock.call('setQuota',
                      {"consumer": {"type": 3,
                                    "identifier": self.share["name"]},
                       "limits": {"type": 5, "value": 7}})])

    @mock.patch.object(quobyte.QuobyteShareDriver, "_resize_share")
    def test_shrink_share(self, mock_qsd_resize_share):
        """shrink_share delegates to _resize_share with the new size."""
        self._driver.shrink_share(shrink_share=self.share,
                                  shrink_size=3,
                                  share_server=None)
        mock_qsd_resize_share.assert_called_once_with(share=self.share,
                                                      new_size=3)
| 39.426901 | 79 | 0.610427 |
import mock
from oslo_config import cfg
import six
from manila import context
from manila import exception
from manila.share import configuration as config
from manila.share import driver
from manila.share.drivers.quobyte import jsonrpc
from manila.share.drivers.quobyte import quobyte
from manila import test
from manila.tests import fake_share
CONF = cfg.CONF
def fake_rpc_handler(name, *args):
    """Stand-in for the Quobyte JSON-RPC ``call`` used by the driver tests.

    Returns a canned response for the RPC method *name*; any other method
    (and 'resolveVolumeName') falls through and yields None.
    """
    if name == 'resolveVolumeName':
        return None
    elif name == 'createVolume':
        return {'volume_uuid': 'voluuid'}
    elif name == 'exportVolume':
        return {'nfs_server_ip': 'fake_location',
                'nfs_export_path': '/fake_share'}
class QuobyteShareDriverTestCase(test.TestCase):
    """Tests for QuobyteShareDriver against a mocked JSON-RPC backend."""

    def setUp(self):
        # Common fixture: driver with mocked RPC, a fake NFS share and a
        # fake access rule used by most tests below.
        super(QuobyteShareDriverTestCase, self).setUp()
        self._context = context.get_admin_context()
        CONF.set_default('driver_handles_share_servers', False)
        self.fake_conf = config.Configuration(None)
        self._driver = quobyte.QuobyteShareDriver(configuration=self.fake_conf)
        self._driver.rpc = mock.Mock()
        self.share = fake_share.fake_share(share_proto='NFS')
        self.access = fake_share.fake_access()

    # --- setup / share creation -------------------------------------------
    @mock.patch('manila.share.drivers.quobyte.jsonrpc.JsonRpc', mock.Mock())
    def test_do_setup_success(self):
        self._driver.rpc.call = mock.Mock(return_value=None)
        self._driver.do_setup(self._context)
        self._driver.rpc.call.assert_called_with('getInformation', {})

    @mock.patch('manila.share.drivers.quobyte.jsonrpc.JsonRpc.__init__',
                mock.Mock(return_value=None))
    @mock.patch.object(jsonrpc.JsonRpc, 'call',
                       side_effect=exception.QBRpcException)
    def test_do_setup_failure(self, mock_call):
        self.assertRaises(exception.QBException,
                          self._driver.do_setup, self._context)

    def test_create_share_new_volume(self):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        result = self._driver.create_share(self._context, self.share)
        self.assertEqual(self.share['export_location'], result)
        self._driver.rpc.call.assert_has_calls([
            mock.call('createVolume', dict(
                name=self.share['name'],
                tenant_domain=self.share['project_id'],
                root_user_id=self.fake_conf.quobyte_default_volume_user,
                root_group_id=self.fake_conf.quobyte_default_volume_group,
                configuration_name=self.fake_conf.quobyte_volume_configuration
            )),
            mock.call('exportVolume',
                      dict(protocol='NFS', volume_uuid='voluuid'))])

    def test_create_share_existing_volume(self):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self._driver.create_share(self._context, self.share)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', dict(protocol='NFS', volume_uuid='voluuid'))

    def test_create_share_wrong_protocol(self):
        share = {'share_proto': 'WRONG_PROTOCOL'}
        self.assertRaises(exception.QBException,
                          self._driver.create_share,
                          context=None,
                          share=share)

    # --- share deletion ---------------------------------------------------
    def test_delete_share_existing_volume(self):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {}

        self._driver.configuration.quobyte_delete_shares = True
        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        self._driver.rpc.call.assert_has_calls([
            mock.call('resolveVolumeName',
                      {'volume_name': 'fakename',
                       'tenant_domain': 'fake_project_uuid'}),
            mock.call('deleteVolume', {'volume_uuid': 'voluuid'}),
            mock.call('exportVolume', {'volume_uuid': 'voluuid',
                                       'remove_export': True})])

    def test_delete_share_existing_volume_disabled(self):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {}

        CONF.set_default('quobyte_delete_shares', False)
        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'remove_export': True})

    @mock.patch.object(quobyte.LOG, 'warning')
    def test_delete_share_nonexisting_volume(self, mock_warning):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return None

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.delete_share(self._context, self.share)
        mock_warning.assert_called_with(
            'No volume found for share fake_project_uuid/fakename')

    # --- access control ---------------------------------------------------
    def test_allow_access(self):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.allow_access(self._context, self.share, self.access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'read_only': False,
                             'add_allow_ip': '10.0.0.1'})

    def test_allow_ro_access(self):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        ro_access = fake_share.fake_access(access_level='ro')
        self._driver.allow_access(self._context, self.share, ro_access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume', {'volume_uuid': 'voluuid',
                             'read_only': True,
                             'add_allow_ip': '10.0.0.1'})

    def test_allow_access_nonip(self):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.access = fake_share.fake_access(**{"access_type":
                                                "non_existant_access_type"})
        self.assertRaises(exception.InvalidShareAccess,
                          self._driver.allow_access,
                          self._context, self.share, self.access)

    def test_deny_access(self):
        def rpc_handler(name, *args):
            if name == 'resolveVolumeName':
                return {'volume_uuid': 'voluuid'}
            elif name == 'exportVolume':
                return {'nfs_server_ip': '10.10.1.1',
                        'nfs_export_path': '/voluuid'}

        self._driver.rpc.call = mock.Mock(wraps=rpc_handler)
        self._driver.deny_access(self._context, self.share, self.access)
        self._driver.rpc.call.assert_called_with(
            'exportVolume',
            {'volume_uuid': 'voluuid', 'remove_allow_ip': '10.0.0.1'})

    @mock.patch.object(quobyte.LOG, 'debug')
    def test_deny_access_nonip(self, mock_debug):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.access = fake_share.fake_access(
            access_type="non_existant_access_type")
        self._driver.deny_access(self._context, self.share, self.access)
        mock_debug.assert_called_with(
            'Quobyte driver only supports ip access control. '
            'Ignoring deny access call for %s , %s',
            'fakename', 'fake_project_uuid')

    # --- volume name resolution and stats ---------------------------------
    def test_resolve_volume_name(self):
        self._driver.rpc.call = mock.Mock(
            return_value={'volume_uuid': 'fake_uuid'})
        self._driver._resolve_volume_name('fake_vol_name', 'fake_domain_name')
        self._driver.rpc.call.assert_called_with(
            'resolveVolumeName',
            {'volume_name': 'fake_vol_name',
             'tenant_domain': 'fake_domain_name'})

    def test_resolve_volume_name_NOENT(self):
        self._driver.rpc.call = mock.Mock(
            return_value=None)
        self.assertIsNone(
            self._driver._resolve_volume_name('fake_vol_name',
                                              'fake_domain_name'))

    def test_resolve_volume_name_other_error(self):
        self._driver.rpc.call = mock.Mock(
            side_effect=exception.QBRpcException(
                result='fubar',
                qbcode=666))
        self.assertRaises(exception.QBRpcException,
                          self._driver._resolve_volume_name,
                          volume_name='fake_vol_name',
                          tenant_domain='fake_domain_name')

    @mock.patch.object(driver.ShareDriver, '_update_share_stats')
    def test_update_share_stats(self, mock_uss):
        self._driver._get_capacities = mock.Mock(return_value=[42, 23])
        self._driver._update_share_stats()
        mock_uss.assert_called_once_with(
            dict(storage_protocol='NFS',
                 vendor_name='Quobyte',
                 share_backend_name=self._driver.backend_name,
                 driver_version=self._driver.DRIVER_VERSION,
                 total_capacity_gb=42,
                 free_capacity_gb=23,
                 reserved_percentage=0))

    def test_get_capacities_gb(self):
        capval = 42115548133
        useval = 19695128917
        self._driver.rpc.call = mock.Mock(
            return_value={'total_logical_capacity': six.text_type(capval),
                          'total_logical_usage': six.text_type(useval)})
        self.assertEqual((39.223160718, 20.880642548),
                         self._driver._get_capacities())

    # --- ensure / resize --------------------------------------------------
    @mock.patch.object(quobyte.QuobyteShareDriver,
                       "_resolve_volume_name",
                       return_value="fake_uuid")
    def test_ensure_share(self, mock_qb_resolve_volname):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        result = self._driver.ensure_share(self._context, self.share, None)
        self.assertEqual(self.share["export_location"], result)
        (mock_qb_resolve_volname.
         assert_called_once_with(self.share['name'],
                                 self.share['project_id']))
        self._driver.rpc.call.assert_has_calls([
            mock.call('exportVolume', dict(
                volume_uuid="fake_uuid",
                protocol='NFS'
            ))])

    @mock.patch.object(quobyte.QuobyteShareDriver,
                       "_resolve_volume_name",
                       return_value=None)
    def test_ensure_deleted_share(self, mock_qb_resolve_volname):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self.assertRaises(exception.ShareResourceNotFound,
                          self._driver.ensure_share,
                          self._context, self.share, None)
        (mock_qb_resolve_volname.
         assert_called_once_with(self.share['name'],
                                 self.share['project_id']))

    @mock.patch.object(quobyte.QuobyteShareDriver, "_resize_share")
    def test_extend_share(self, mock_qsd_resize_share):
        self._driver.extend_share(ext_share=self.share,
                                  ext_size=2,
                                  share_server=None)
        mock_qsd_resize_share.assert_called_once_with(share=self.share,
                                                      new_size=2)

    def test_resize_share(self):
        self._driver.rpc.call = mock.Mock(wraps=fake_rpc_handler)
        self._driver._resize_share(share=self.share, new_size=7)
        self._driver.rpc.call.assert_has_calls([
            mock.call('setQuota',
                      {"consumer": {"type": 3,
                                    "identifier": self.share["name"]},
                       "limits": {"type": 5, "value": 7}})])

    @mock.patch.object(quobyte.QuobyteShareDriver, "_resize_share")
    def test_shrink_share(self, mock_qsd_resize_share):
        self._driver.shrink_share(shrink_share=self.share,
                                  shrink_size=3,
                                  share_server=None)
        mock_qsd_resize_share.assert_called_once_with(share=self.share,
                                                      new_size=3)
| true | true |
1c383f0c22546fc778beaef16dcd29f57adc3e00 | 4,309 | py | Python | roles/api/files/scripts/export-fworch-config.py | NilsPur/firewall-orchestrator | 1e2dd61abc99b8dc72903abe9ac9c3172420b39f | [
"Apache-2.0"
] | null | null | null | roles/api/files/scripts/export-fworch-config.py | NilsPur/firewall-orchestrator | 1e2dd61abc99b8dc72903abe9ac9c3172420b39f | [
"Apache-2.0"
] | null | null | null | roles/api/files/scripts/export-fworch-config.py | NilsPur/firewall-orchestrator | 1e2dd61abc99b8dc72903abe9ac9c3172420b39f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
# export-fworch-config.py: export the full config of the product itself for later import
# does not contain any firewall config data, just the device config plus fworch user config
# Export the fworch product configuration (device + management definitions)
# as JSON via the fworch middleware/API. Flat script: parse args, log in,
# query the API, write the result to the output file.
import sys, logging
import json, requests, requests.packages, argparse

base_dir = "/usr/local/fworch"
importer_base_dir = base_dir + '/importer'
# The importer package (common, fwo_api) lives outside the normal path.
sys.path.append(importer_base_dir)
import common, fwo_api

parser = argparse.ArgumentParser(
    description='Export fworch configuration into encrypted json file')
parser.add_argument('-o', '--out', metavar='output_file', required=True, help='filename to write output in json format to')
parser.add_argument('-u', '--user', metavar='user_name', default='admin', help='username for getting fworch config (default=admin')
# NOTE(review): help text below says "username" but this is the password
# file option — looks copy-pasted; confirm and fix upstream.
parser.add_argument('-p', '--password', metavar='password_file', default=base_dir + '/etc/secrets/ui_admin_pwd', help='username for getting fworch config (default=$FWORCH_HOME/etc/secrets/ui_admin_pwd')
parser.add_argument('-d', '--debug', metavar='debug_level', default='0',
                    help='Debug Level: 0=off, 1=send debug to console, 2=send debug to file, 3=keep temporary config files; default=0')
parser.add_argument('-x', '--proxy', metavar='proxy_string',
                    help='proxy server string to use, e.g. http://1.2.3.4:8080')
parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='',
                    help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
args = parser.parse_args()
# Show usage when invoked with no arguments at all.
if len(sys.argv) == 1:
    parser.print_help(sys.stderr)
    sys.exit(1)

fwo_config_filename = base_dir + '/etc/fworch.json'
if args.ssl == '' or args.ssl == 'off':
    requests.packages.urllib3.disable_warnings() # suppress ssl warnings only
debug_level = int(args.debug)
common.set_log_level(log_level=debug_level, debug_level=debug_level)

# read fwo config (API URLs)
with open(fwo_config_filename, "r") as fwo_config:
    fwo_config = json.loads(fwo_config.read())
user_management_api_base_url = fwo_config['middleware_uri']
fwo_api_base_url = fwo_config['api_uri']
method = 'api/AuthenticationToken/Get'
ssl_mode = args.ssl

# authenticate to get JWT
with open(args.password, 'r') as file:
    exporter_pwd = file.read().replace('\n', '')
# NOTE(review): 'proxy' in args tests attribute *existence* on the argparse
# Namespace, which is always True because --proxy is defined (default None).
# The else branch is dead; presumably the intent was "if args.proxy:".
# Confirm fwo_api.login treats proxy_string=None as "no proxy" before fixing.
if 'proxy' in args:
    jwt = fwo_api.login(args.user, exporter_pwd, user_management_api_base_url,
                        method, ssl_verification=ssl_mode, proxy_string=args.proxy)
else:
    jwt = fwo_api.login(args.user, exporter_pwd, user_management_api_base_url,
                        method, ssl_verification=ssl_mode)

config_json = {}

# get device details via GraphQL
mgm_query = """
  query getFullDeviceDetails {
    management {
      mgm_id
      mgm_name
      ssh_hostname
      ssh_port
      ssh_private_key
      ssh_public_key
      ssh_user
      dev_typ_id
      config_path
      do_not_import
      force_initial_import
      hide_in_gui
      importer_hostname
      mgm_comment
      debug_level
      mgm_create
      mgm_update
      last_import_md5_complete_config
    }
    device {
      dev_id
      dev_name
      dev_typ_id
      mgm_id
      local_rulebase_name
      global_rulebase_name
      package_name
      dev_comment
      do_not_import
      force_initial_import
      hide_in_gui
      dev_create
      dev_update
    }
  }
"""
api_call_result = fwo_api.call(fwo_api_base_url, jwt, mgm_query, query_variables={}, role='admin')
if 'data' in api_call_result:
    config_json.update({ 'device_configuration': api_call_result['data'] })
else:
    logging.error('did not succeed in getting device details from API')
    sys.exit(1)

# todo: use a single source for fwo_api between this script and importer
# todo: use a single source for graphql queries between importer, config im/exporter, C#
# todo: get more config data
#       get user related data:
#           ldap servers
#           tenants
#           uiusers including roles & groups & tenants
# todo: encrypt config before writing to file

with open(args.out, 'w') as file:
    file.write(json.dumps(config_json, indent=3))

sys.exit(0)
| 36.516949 | 202 | 0.668369 |
# Export the fworch product configuration (device + management definitions)
# as JSON via the fworch middleware/API.
import sys, logging
import json, requests, requests.packages, argparse

base_dir = "/usr/local/fworch"
importer_base_dir = base_dir + '/importer'
# The importer package (common, fwo_api) lives outside the normal path.
sys.path.append(importer_base_dir)
import common, fwo_api

parser = argparse.ArgumentParser(
    description='Export fworch configuration into encrypted json file')
parser.add_argument('-o', '--out', metavar='output_file', required=True, help='filename to write output in json format to')
parser.add_argument('-u', '--user', metavar='user_name', default='admin', help='username for getting fworch config (default=admin')
parser.add_argument('-p', '--password', metavar='password_file', default=base_dir + '/etc/secrets/ui_admin_pwd', help='username for getting fworch config (default=$FWORCH_HOME/etc/secrets/ui_admin_pwd')
parser.add_argument('-d', '--debug', metavar='debug_level', default='0',
                    help='Debug Level: 0=off, 1=send debug to console, 2=send debug to file, 3=keep temporary config files; default=0')
parser.add_argument('-x', '--proxy', metavar='proxy_string',
                    help='proxy server string to use, e.g. http://1.2.3.4:8080')
parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='',
                    help='[ca]certfile, if value not set, ssl check is off"; default=empty/off')
args = parser.parse_args()
if len(sys.argv) == 1:
    parser.print_help(sys.stderr)
    sys.exit(1)

fwo_config_filename = base_dir + '/etc/fworch.json'
if args.ssl == '' or args.ssl == 'off':
    requests.packages.urllib3.disable_warnings() # suppress ssl warnings only
debug_level = int(args.debug)
common.set_log_level(log_level=debug_level, debug_level=debug_level)
# read fwo config (API URLs)
with open(fwo_config_filename, "r") as fwo_config:
    fwo_config = json.loads(fwo_config.read())
user_management_api_base_url = fwo_config['middleware_uri']
fwo_api_base_url = fwo_config['api_uri']
method = 'api/AuthenticationToken/Get'
ssl_mode = args.ssl
# authenticate to get JWT
with open(args.password, 'r') as file:
    exporter_pwd = file.read().replace('\n', '')
# NOTE(review): 'proxy' in args is always True (argparse defines the
# attribute with default None), so the else branch is dead; presumably
# "if args.proxy:" was intended — confirm before changing.
if 'proxy' in args:
    jwt = fwo_api.login(args.user, exporter_pwd, user_management_api_base_url,
                        method, ssl_verification=ssl_mode, proxy_string=args.proxy)
else:
    jwt = fwo_api.login(args.user, exporter_pwd, user_management_api_base_url,
                        method, ssl_verification=ssl_mode)
config_json = {}
# get device details
mgm_query = """
  query getFullDeviceDetails {
    management {
      mgm_id
      mgm_name
      ssh_hostname
      ssh_port
      ssh_private_key
      ssh_public_key
      ssh_user
      dev_typ_id
      config_path
      do_not_import
      force_initial_import
      hide_in_gui
      importer_hostname
      mgm_comment
      debug_level
      mgm_create
      mgm_update
      last_import_md5_complete_config
    }
    device {
      dev_id
      dev_name
      dev_typ_id
      mgm_id
      local_rulebase_name
      global_rulebase_name
      package_name
      dev_comment
      do_not_import
      force_initial_import
      hide_in_gui
      dev_create
      dev_update
    }
  }
"""
api_call_result = fwo_api.call(fwo_api_base_url, jwt, mgm_query, query_variables={}, role='admin')
if 'data' in api_call_result:
    config_json.update({ 'device_configuration': api_call_result['data'] })
else:
    logging.error('did not succeed in getting device details from API')
    sys.exit(1)
# todo: use a single source for fwo_api between this script and importer
# todo: use a single source for graphql queries between importer, config im/exporter, C#
# todo: get more config data
#       get user related data:
#           ldap servers
#           tenants
#           uiusers including roles & groups & tenants
# todo: encrypt config before writing to file
with open(args.out, 'w') as file:
    file.write(json.dumps(config_json, indent=3))
sys.exit(0)
sys.exit(0)
| true | true |
1c383f218af36d693de4546a8854dc5ea2264b12 | 785 | py | Python | resource_group.py | gfortil/hpcc-internship | 8061771bcb4791fca54e6b1d74f0c019ad69bca4 | [
"MIT"
] | null | null | null | resource_group.py | gfortil/hpcc-internship | 8061771bcb4791fca54e6b1d74f0c019ad69bca4 | [
"MIT"
] | null | null | null | resource_group.py | gfortil/hpcc-internship | 8061771bcb4791fca54e6b1d74f0c019ad69bca4 | [
"MIT"
] | 1 | 2021-06-10T22:07:15.000Z | 2021-06-10T22:07:15.000Z | import subprocess
from config import region_codes, resource_prefix, spot_region_map
def create_resource_group(regions, prefix):
    """Create one Azure resource group per region via the az CLI.

    Each group is named ``prefix`` + region code; the exit status and
    output of every ``az group create`` invocation are printed.
    """
    for region in regions:
        group_name = '{}{}'.format(prefix, region)
        cli_command = 'az group create --name {} --location {}'.format(
            group_name, region)
        exit_status, output = subprocess.getstatusoutput(cli_command)
        print(exit_status, output)
def delete_resource_group(regions, prefix):
    """Delete the per-region resource groups named ``prefix`` + region code.

    Runs ``az group delete --yes`` for each region and prints the exit
    status and output of every invocation.
    """
    for region in regions:
        group_name = '{}{}'.format(prefix, region)
        cli_command = 'az group delete --name {} --yes'.format(group_name)
        exit_status, output = subprocess.getstatusoutput(cli_command)
        print(exit_status, output)
if __name__ == '__main__':
create_resource_group(region_codes, resource_prefix) | 37.380952 | 98 | 0.719745 | import subprocess
from config import region_codes, resource_prefix, spot_region_map
def create_resource_group(regions, prefix):
    """Create one Azure resource group per region via the az CLI."""
    for location in regions:
        # Group name is the prefix followed by the region code.
        resource_group_name = prefix + str(location)
        command = 'az group create --name {} --location {}'.format(resource_group_name, location)
        status, result = subprocess.getstatusoutput(command)
        # Print the CLI exit status and output for operator inspection.
        print(status, result)
def delete_resource_group(regions, prefix):
for location in regions:
resource_group_name = prefix + str(location)
command = 'az group delete --name {} --yes'.format(resource_group_name)
status, result = subprocess.getstatusoutput(command)
print(status, result)
if __name__ == '__main__':
create_resource_group(region_codes, resource_prefix) | true | true |
1c383f8b680f153f22ac31cf1562d4db70ad8920 | 5,771 | py | Python | pychron/experiment/automated_run/factory_view.py | UManPychron/pychron | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | [
"Apache-2.0"
] | null | null | null | pychron/experiment/automated_run/factory_view.py | UManPychron/pychron | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | [
"Apache-2.0"
] | 11 | 2015-10-08T14:21:39.000Z | 2022-01-26T14:19:21.000Z | pychron/experiment/automated_run/factory_view.py | UManPychron/pychron | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | [
"Apache-2.0"
] | null | null | null | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import HasTraits, Instance
from traitsui.api import View, Item, VGroup, Spring, HGroup, ButtonEditor
from pychron.core.pychron_traits import BorderVGroup
from pychron.core.ui.enum_editor import myEnumEditor
from pychron.envisage.icon_button_editor import icon_button_editor
# User-facing help text for the "position" field tooltip. The literal is
# runtime text shown in the UI, so it is reproduced verbatim.
POSITION_TOOLTIP = '''Set the position for this analysis or group of analyses.
Examples:
1. 4 or p4 (goto position 4)
2. 3,4,5 (goto positions 3,4,5. treat as one analysis)
3. 7-12 (goto positions 7,8,9,10,11,12. treat as individual analyses)
4. 7:12 (same as #3)
5. 10:16:2 (goto positions 10,12,14,16. treat as individual analyses)
6. D1 (drill position 1)
7. T1-2 (goto named position T1-2 i.e transect 1, point 2)
8. L3 (trace path L3)
9. 1-6;9;11;15-20 (combination of rules 2. and 3. treat all positions as individual analyses)
10. 1.0,2.0 (goto the point defined by x,y[,z]. Use ";" to treat multiple points as one analysis e.g 1.0,2.0;3.0,4.0)
'''

# Tooltip for the pattern selector (single string, continued with "\").
PATTERN_TOOLTIP = 'Select a pattern from Remote or Local Patterns. \
If unsure from which group to choice use a "Remote" pattern'
class FactoryView(HasTraits):
    """Traits UI view around an AutomatedRunFactory model.

    Builds the "Extract" group (extraction parameters, step-heat template,
    position) and a "Post Measurement" group for the run-factory editor.
    """
    # model class is referenced by dotted path so traits imports it lazily
    model = Instance('pychron.experiment.automated_run.factory.AutomatedRunFactory')

    def trait_context(self):
        """Expose the wrapped model as ``object`` inside the view."""
        return {'object': self.model}

    def traits_view(self):
        """Top-level view: a single group assembled by _get_group."""
        v = View(self._get_group())
        return v

    def _get_group(self):
        """Assemble the Extract and Post Measurement groups."""
        # fixed-width (non-springy) spacer used to align the first row
        sspring = lambda width=17: Spring(springy=False, width=width)
        extract_grp = BorderVGroup(HGroup(sspring(width=33),
                                          Item('extract_value', label='Extract',
                                               tooltip='Set the extract value in extract units',
                                               enabled_when='extractable'),
                                          Item('extract_units',
                                               show_label=False,
                                               editor=myEnumEditor(name='extract_units_names')),
                                          Item('ramp_duration', label='Ramp Dur. (s)'), ),
                                   HGroup(Item('use_cdd_warming', label='CDD Warm',
                                               tooltip='Use the CDD warming routine at end of measurement'),
                                          # Item('collection_time_zero_offset',
                                          #      label='T_o offset (s)',
                                          #      tooltip='# of seconds afer inlet opens to set time zero'),
                                          Item('overlap', label='Overlap (s)',
                                               tooltip='Duration to wait before staring next run')),
                                   self._step_heat_group(),
                                   HGroup(Item('duration', label='Duration (s)',
                                               tooltip='Set the number of seconds to run the extraction device.'),
                                          Item('pre_cleanup', label='Pre Cleanup (s)'),
                                          Item('cleanup', label='Cleanup (s)',
                                               tooltip='Set the number of seconds to getter the sample gas'),
                                          Item('post_cleanup', label='Post Cleanup (s)')),
                                   HGroup(Item('beam_diameter'), Item('light_value', label='Lighting')),
                                   self._position_group(),
                                   label='Extract')
        post_measurement_group = BorderVGroup(Item('delay_after'), label='Post Measurement')
        grp = VGroup(extract_grp, post_measurement_group)
        return grp

    def _position_group(self):
        """Position entry + pattern selector + pattern-edit button row."""
        grp = HGroup(Item('position',
                          tooltip=POSITION_TOOLTIP),
                     Item('pattern',
                          show_label=False,
                          tooltip=PATTERN_TOOLTIP,
                          editor=myEnumEditor(name='patterns')),
                     Item('edit_pattern',
                          show_label=False,
                          editor=ButtonEditor(label_value='edit_pattern_label')))
        return grp

    def _step_heat_group(self):
        """Step-heat template selector, edit button and apply-to-selected
        button (enabled only when runs are selected)."""
        grp = HGroup(Item('template',
                          label='Step Heat Template',
                          editor=myEnumEditor(name='templates'),
                          show_label=False, ),
                     Item('edit_template',
                          show_label=False,
                          editor=ButtonEditor(label_value='edit_template_label')),
                     icon_button_editor('apply_stepheat', 'arrow_right',
                                        enabled_when='_selected_runs',
                                        tooltip='Apply step heat template to selected'))
        return grp
# ============= EOF =============================================
| 51.990991 | 117 | 0.515162 |
from traits.api import HasTraits, Instance
from traitsui.api import View, Item, VGroup, Spring, HGroup, ButtonEditor
from pychron.core.pychron_traits import BorderVGroup
from pychron.core.ui.enum_editor import myEnumEditor
from pychron.envisage.icon_button_editor import icon_button_editor
# NOTE(review): everything below is a byte-duplicate (comment-stripped
# dataset column) of the FactoryView module above.

# User-facing tooltip for the "position" field (runtime text, verbatim).
POSITION_TOOLTIP = '''Set the position for this analysis or group of analyses.
Examples:
1. 4 or p4 (goto position 4)
2. 3,4,5 (goto positions 3,4,5. treat as one analysis)
3. 7-12 (goto positions 7,8,9,10,11,12. treat as individual analyses)
4. 7:12 (same as #3)
5. 10:16:2 (goto positions 10,12,14,16. treat as individual analyses)
6. D1 (drill position 1)
7. T1-2 (goto named position T1-2 i.e transect 1, point 2)
8. L3 (trace path L3)
9. 1-6;9;11;15-20 (combination of rules 2. and 3. treat all positions as individual analyses)
10. 1.0,2.0 (goto the point defined by x,y[,z]. Use ";" to treat multiple points as one analysis e.g 1.0,2.0;3.0,4.0)
'''

# Tooltip for the pattern selector.
PATTERN_TOOLTIP = 'Select a pattern from Remote or Local Patterns. \
If unsure from which group to choice use a "Remote" pattern'

class FactoryView(HasTraits):
    # Traits UI view around an AutomatedRunFactory model.
    model = Instance('pychron.experiment.automated_run.factory.AutomatedRunFactory')

    def trait_context(self):
        # Expose the wrapped model as ``object`` inside the view.
        return {'object': self.model}

    def traits_view(self):
        # Top-level view: a single group assembled by _get_group.
        v = View(self._get_group())
        return v

    def _get_group(self):
        # Assemble the Extract and Post Measurement groups.
        sspring = lambda width=17: Spring(springy=False, width=width)
        extract_grp = BorderVGroup(HGroup(sspring(width=33),
                                          Item('extract_value', label='Extract',
                                               tooltip='Set the extract value in extract units',
                                               enabled_when='extractable'),
                                          Item('extract_units',
                                               show_label=False,
                                               editor=myEnumEditor(name='extract_units_names')),
                                          Item('ramp_duration', label='Ramp Dur. (s)'), ),
                                   HGroup(Item('use_cdd_warming', label='CDD Warm',
                                               tooltip='Use the CDD warming routine at end of measurement'),
                                          Item('overlap', label='Overlap (s)',
                                               tooltip='Duration to wait before staring next run')),
                                   self._step_heat_group(),
                                   HGroup(Item('duration', label='Duration (s)',
                                               tooltip='Set the number of seconds to run the extraction device.'),
                                          Item('pre_cleanup', label='Pre Cleanup (s)'),
                                          Item('cleanup', label='Cleanup (s)',
                                               tooltip='Set the number of seconds to getter the sample gas'),
                                          Item('post_cleanup', label='Post Cleanup (s)')),
                                   HGroup(Item('beam_diameter'), Item('light_value', label='Lighting')),
                                   self._position_group(),
                                   label='Extract')
        post_measurement_group = BorderVGroup(Item('delay_after'), label='Post Measurement')
        grp = VGroup(extract_grp, post_measurement_group)
        return grp

    def _position_group(self):
        # Position entry + pattern selector + pattern-edit button row.
        grp = HGroup(Item('position',
                          tooltip=POSITION_TOOLTIP),
                     Item('pattern',
                          show_label=False,
                          tooltip=PATTERN_TOOLTIP,
                          editor=myEnumEditor(name='patterns')),
                     Item('edit_pattern',
                          show_label=False,
                          editor=ButtonEditor(label_value='edit_pattern_label')))
        return grp

    def _step_heat_group(self):
        # Step-heat template selector, edit button and apply-to-selected.
        grp = HGroup(Item('template',
                          label='Step Heat Template',
                          editor=myEnumEditor(name='templates'),
                          show_label=False, ),
                     Item('edit_template',
                          show_label=False,
                          editor=ButtonEditor(label_value='edit_template_label')),
                     icon_button_editor('apply_stepheat', 'arrow_right',
                                        enabled_when='_selected_runs',
                                        tooltip='Apply step heat template to selected'))
        return grp
| true | true |
1c384065f5f0c168b0082b1c67a44eacd1b94183 | 1,376 | py | Python | service/__init__.py | nyu-devops-wishlist/wishlist | b0a99a6f9905dc8547ccbc6375e5d6db512102cf | [
"Apache-2.0"
] | 1 | 2020-03-05T21:44:09.000Z | 2020-03-05T21:44:09.000Z | service/__init__.py | nyu-devops-wishlist/wishlist | b0a99a6f9905dc8547ccbc6375e5d6db512102cf | [
"Apache-2.0"
] | 79 | 2020-02-25T23:55:32.000Z | 2020-05-12T17:44:53.000Z | service/__init__.py | nyu-devops-wishlist/wishlist | b0a99a6f9905dc8547ccbc6375e5d6db512102cf | [
"Apache-2.0"
] | 1 | 2020-03-10T21:58:13.000Z | 2020-03-10T21:58:13.000Z | """
Package: service
Package for the application models and service routes
This module creates and configures the Flask app and sets up the logging
and SQL database
"""
import os
import sys
import logging
from flask import Flask
# Create and configure the Flask application.
app = Flask(__name__)
app.config.from_object('config')

# Import the routes AFTER the Flask app exists: the service/models modules
# need `app` at import time (intentional circular-ish import order).
from service import service, models

# Under gunicorn (i.e. not run as a script) adopt gunicorn's log handlers
# so application logs share its destinations and level.
if __name__ != '__main__':
    gunicorn_logger = logging.getLogger('gunicorn.error')
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)
    app.logger.propagate = False

    # Make all log formats consistent
    formatter = logging.Formatter("[%(asctime)s] [%(levelname)s] [%(module)s] %(message)s", "%Y-%m-%d %H:%M:%S %z")
    for handler in app.logger.handlers:
        handler.setFormatter(formatter)
    app.logger.info('Logging handler established')

# Startup banner.
app.logger.info(70 * "*")
app.logger.info(" TEST  S E R V I C E   R U N N I N G  ".center(70, "*"))
app.logger.info(70 * "*")

try:
    service.init_db()  # make our sqlalchemy tables
except Exception as error:
    app.logger.critical("%s: Cannot continue", error)
    # gunicorn requires exit code 4 to stop spawning workers when they die
    sys.exit(4)

# NOTE(review): "inititalized" typo is in the runtime log string; left as-is.
app.logger.info("Service inititalized!")
| 32 | 116 | 0.694767 | import os
import sys
import logging
from flask import Flask
# NOTE(review): byte-duplicate (comment-stripped dataset column) of the
# Flask service bootstrap above.
app = Flask(__name__)
app.config.from_object('config')
# Routes must be imported after `app` exists.
from service import service, models
# Under gunicorn, adopt its log handlers/level and unify the format.
if __name__ != '__main__':
    gunicorn_logger = logging.getLogger('gunicorn.error')
    app.logger.handlers = gunicorn_logger.handlers
    app.logger.setLevel(gunicorn_logger.level)
    app.logger.propagate = False
    formatter = logging.Formatter("[%(asctime)s] [%(levelname)s] [%(module)s] %(message)s", "%Y-%m-%d %H:%M:%S %z")
    for handler in app.logger.handlers:
        handler.setFormatter(formatter)
    app.logger.info('Logging handler established')
# Startup banner.
app.logger.info(70 * "*")
app.logger.info(" TEST  S E R V I C E   R U N N I N G  ".center(70, "*"))
app.logger.info(70 * "*")
try:
    service.init_db()
except Exception as error:
    app.logger.critical("%s: Cannot continue", error)
    # exit code 4 tells gunicorn to stop respawning dead workers
    sys.exit(4)
app.logger.info("Service inititalized!")
| true | true |
1c384344aae69c08b0bf6b297a873f04d8fd7c49 | 49,993 | py | Python | parser/team23/interface.py | itsmjoe/tytus | 3b0341cc854d67979b766c5c8b06ed172ce0c913 | [
"MIT"
] | null | null | null | parser/team23/interface.py | itsmjoe/tytus | 3b0341cc854d67979b766c5c8b06ed172ce0c913 | [
"MIT"
] | null | null | null | parser/team23/interface.py | itsmjoe/tytus | 3b0341cc854d67979b766c5c8b06ed172ce0c913 | [
"MIT"
] | null | null | null | from tkinter import *
from tkinter import ttk
from tkinter import filedialog
from PIL import Image, ImageTk
import grammar.sql_grammar as gramatica
from graphviz import Source
from tools.console_text import *
from error.errores import *
from tools.tabla_simbolos import *
import os
class window:
    def __init__(self):
        """Build the whole IDE window: menu bar, tool bar, first editor
        tab, output console and status bar."""
        # --- state ---
        self.ventana = Tk()                                        # root window
        self.color = "darkgray"                                    # window background color
        self.dir_os = os.path.dirname(__file__)                    # base dir for ./assets icons
        self.count_tabs = 1                                        # number of editor tabs open
        self.tabControl = ttk.Notebook(self.ventana, height=500)   # notebook holding editor tabs
        self.tab_salida = ttk.Notebook(self.ventana, height=100)   # notebook holding output tabs
        self.my_status = StringVar()                               # status-bar text
        # --- window setup ---
        self.ventana.title("Interfaz para compiladores")
        self.ventana.geometry("900x700")
        self.ventana.config(bg=self.color)
        # --- widgets (creation order matters: pack() is first-come-first-served) ---
        self.create_menu_bar()
        self.create_tool_bar()
        self.add_tab("Untitled-" + str(self.count_tabs))
        self.create_consola()
        self.create_status_bar()
        # --- search & replace state ---
        self.search_text = ""
        self.replace_text = ""
    # --- main loop ---
    def run(self):
        """Hand control to Tk's event loop (blocks until the window closes)."""
        self.ventana.mainloop()
#Widgets de la interfaz
    def create_menu_bar(self):
        """Build the File/Edit/Tools/Options/Help menu bar.

        Items are added in display order; accelerator strings are labels
        only — the actual key bindings live in bind_shortcuts().
        """
        my_menu = Menu(self.ventana)
        self.ventana.config(menu=my_menu)
        # top-level cascades
        file_menu = Menu(my_menu, tearoff=0)
        my_menu.add_cascade(label="File", menu=file_menu)
        edit_menu = Menu(my_menu, tearoff=0)
        my_menu.add_cascade(label="Edit", menu=edit_menu)
        tools_menu = Menu(my_menu, tearoff=0)
        my_menu.add_cascade(label="Tools", menu=tools_menu)
        options_menu = Menu(my_menu, tearoff=0)
        my_menu.add_cascade(label="Options", menu=options_menu)
        help_menu = Menu(my_menu, tearoff=0)
        my_menu.add_cascade(label="Help", menu=help_menu)
        # File
        file_menu.add_command(label="New File", command=self.new_file, accelerator="Ctrl+N")
        file_menu.add_separator()
        file_menu.add_command(label="Open File", command=self.open_file, accelerator="Ctrl+O")
        file_menu.add_separator()
        file_menu.add_command(label="Save File", command=self.save_file, accelerator="Ctrl+Shift-S")
        file_menu.add_command(label="Save As", command=self.save_as, accelerator="Ctrl+S")
        file_menu.add_separator()
        file_menu.add_command(label="Close Tab", command=self.delete_tab, accelerator="Ctrl+W")
        file_menu.add_command(label="Exit", command=self.ventana.quit)
        # Edit: clipboard events are generated on whichever widget has focus
        edit_menu.add_command(label="Copy", command=lambda: self.ventana.focus_get().event_generate('<<Copy>>'))
        edit_menu.add_command(label="Paste", command=lambda: self.ventana.focus_get().event_generate('<<Paste>>'))
        edit_menu.add_command(label="Cut", command=lambda: self.ventana.focus_get().event_generate('<<Cut>>'))
        edit_menu.add_separator()
        edit_menu.add_command(label="Search", command=self.find_popup, accelerator="Ctrl+F")
        edit_menu.add_command(label="Replace", command=self.replace_popup, accelerator="Ctrl+H")
        # Tools: run + report generators (defined elsewhere in this class/file)
        tools_menu.add_command(label="Ejecutar", command=self.ejecutar_codigo, accelerator="F5")
        tools_menu.add_separator()
        tools_menu.add_command(label="AST", command=self.compilar_AST_pdf)
        tools_menu.add_separator()
        tools_menu.add_command(label="Errores Lexicos", command=self.compilar_lexico_pdf)
        tools_menu.add_command(label="Errores Sintacticos", command=self.compilar_sintactico_pdf)
        tools_menu.add_command(label="Errores Semanticos", command=self.compilar_semantico_pdf)
        tools_menu.add_command(label="Todos los errores", command=self.compilar_Error_pdf)
        tools_menu.add_separator()
        tools_menu.add_command(label="Reporte Gramatical", command=self.compilar_grammar_pdf)
        tools_menu.add_separator()
        tools_menu.add_command(label="Tabla de Simbolos", command=self.compilar_ts_pdf)
        #tools_menu.add_command(label="Debug", command=self.open_file, accelerator="F5")
        # Options > Theme submenu
        theme_menu = Menu(options_menu, tearoff=0)
        options_menu.add_cascade(label="Theme", menu=theme_menu)
        #options_menu.add_command(label="Line Number", command=self.open_file)
        theme_menu.add_command(label="Default", command=self.default_theme)
        theme_menu.add_command(label="Light Gray", command=self.gray_theme)
        theme_menu.add_command(label="Dark Night", command=self.dark_night_theme)
        theme_menu.add_command(label="Light Blue", command=self.light_blue_theme)
        theme_menu.add_command(label="Dark", command=self.dark_theme)
        # Help
        help_menu.add_command(label="Help")
        help_menu.add_command(label="About", command=self.popup_about)
def popup_about(self):
popup = Tk()
popup.wm_title("About")
popup.geometry("330x190")
popup.resizable(False, False)
label = ttk.Label(popup, text="------------------ EDITOR COMPILADORES 2 ------------------", relief="sunken")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Versión: 1.51.1 (system setup)")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Confirmación: -----------------------------------")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Fecha: 2020-12-10T08:44:32")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Sistema Operativo: Windows_NT x64 10.0.18363")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Developer: Luis Fernando Arana Arias - 201700988\nPedro Rolando Ordoñez Carrillo - 201701187\nSteven Aaron Sis Hernandez - 201706357\nDavis Francisco Edward Enriquez - 201700972")
label.pack(side="top", fill="x", pady=3)
B1 = ttk.Button(popup, text="Close", command = popup.destroy)
B1.pack()
popup.mainloop()
def pop_alert(self, msg):
gui = Tk()
gui.title("Alerta")
gui.geometry("230x60")
gui.resizable(False,False)
label = ttk.Label(gui, text=msg)
label.pack(side="top", fill="x", pady=3)
B1 = ttk.Button(gui, text="Close", command = gui.destroy)
B1.pack()
gui.mainloop()
def find_popup(self):
popup = Tk()
popup.title("Buscar")
popup.geometry("250x30")
popup.resizable(False, False)
Label(popup, text="> ").pack(side=LEFT)
txtbox = Entry(popup)
txtbox.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox.focus_set()
Find = Button(popup, text ='Buscar')
Find.pack(side = LEFT)
def find_func():
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
text = txtbox.get()
txt_box.tag_remove('found', '1.0', END)
if (text):
idx = '1.0'
while 1:
idx = txt_box.search(text, idx, nocase = 1, stopindex = END)
if not idx: break
lastidx = '% s+% dc' % (idx, len(text))
txt_box.tag_add('found', idx, lastidx)
idx = lastidx
txt_box.tag_config('found', foreground='red', background ='#CACACA')
Find.config(command = find_func)
def replace_popup(self):
popup = Tk()
popup.title("Buscar y Remplazar")
popup.geometry("350x30")
popup.resizable(False, False)
Label(popup, text="> ").pack(side=LEFT)
txtbox = Entry(popup)
txtbox.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox.focus_set()
Label(popup, text="> ").pack(side=LEFT)
txtbox2 = Entry(popup)
txtbox2.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox2.focus_set()
Replace = Button(popup, text ='Remplazar')
Replace.pack(side = LEFT)
def replace_func():
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
text_find = txtbox.get()
text_replace = txtbox2.get()
txt_box.tag_remove('found', '1.0', END)
if (text_find):
idx = '1.0'
while 1:
idx = txt_box.search(text_find, idx, nocase = 1, stopindex = END)
if not idx: break
lastidx = '% s+% dc' % (idx, len(text_find))
txt_box.delete(idx, lastidx)
txt_box.insert(idx, text_replace)
lastidx = '% s+% dc' % (idx, len(text_replace))
txt_box.tag_add('found', idx, lastidx)
idx = lastidx
txt_box.tag_config('found', foreground ='green', background = 'yellow')
Replace.config(command = replace_func)
def create_tool_bar(self):
#Barra de herramientas
myTool = Frame(self.ventana)
#Botones de la barra de herramientas
imgOpen = Image.open(self.dir_os +'/assets/open.png')
imgOpen = imgOpen.resize((20,20), Image.ANTIALIAS)
imgOpen = ImageTk.PhotoImage(imgOpen)
OpenBtn = Button(myTool, image=imgOpen, command=self.open_file)
OpenBtn.image = imgOpen
OpenBtn.pack(side=LEFT, padx=2, pady=2)
imgFile = Image.open(self.dir_os +'/assets/file.png')
imgFile = imgFile.resize((20, 20), Image.ANTIALIAS)
imgFile = ImageTk.PhotoImage(imgFile)
FileBtn = Button(myTool, image=imgFile, command=self.new_file)
FileBtn.image = imgFile
FileBtn.pack(side=LEFT, padx=2, pady=2)
imgSave = Image.open(self.dir_os +'/assets/save.png')
imgSave = imgSave.resize((20, 20), Image.ANTIALIAS)
imgSave = ImageTk.PhotoImage(imgSave)
SaveBtn = Button(myTool, image=imgSave, command=self.save_as)
SaveBtn.image = imgSave
SaveBtn.pack(side=LEFT, padx=2, pady=2)
imgSearch = Image.open(self.dir_os +'/assets/search.png')
imgSearch = imgSearch.resize((20, 20), Image.ANTIALIAS)
imgSearch = ImageTk.PhotoImage(imgSearch)
SearchBtn = Button(myTool, image=imgSearch, command=self.find_popup)
SearchBtn.image = imgSearch
SearchBtn.pack(side=LEFT, padx=2, pady=2)
imgDebug = Image.open(self.dir_os +'/assets/debug.png')
imgDebug = imgDebug.resize((20, 20), Image.ANTIALIAS)
imgDebug = ImageTk.PhotoImage(imgDebug)
DebugBtn = Button(myTool, image=imgDebug, command=self.open_file)
DebugBtn.image = imgDebug
DebugBtn.pack(side=RIGHT, padx=2, pady=2)
imgExecute = Image.open(self.dir_os +'/assets/execute.png')
imgExecute = imgExecute.resize((20, 20), Image.ANTIALIAS)
imgExecute = ImageTk.PhotoImage(imgExecute)
ExecuteBtn = Button(myTool, image=imgExecute, command=self.ejecutar_codigo)
ExecuteBtn.image = imgExecute
ExecuteBtn.pack(side=RIGHT, padx=2, pady=2)
imgAbout = Image.open(self.dir_os +'/assets/about.png')
imgAbout = imgAbout.resize((20, 20), Image.ANTIALIAS)
imgAbout = ImageTk.PhotoImage(imgAbout)
AboutBtn = Button(myTool, image=imgAbout, command=self.popup_about)
AboutBtn.image = imgAbout
AboutBtn.pack(side=LEFT, padx=2, pady=2)
imgClose = Image.open(self.dir_os +'/assets/close.png')
imgClose = imgClose.resize((20, 20), Image.ANTIALIAS)
imgClose = ImageTk.PhotoImage(imgClose)
CloseBtn = Button(myTool, image=imgClose, command=self.delete_tab)
CloseBtn.image = imgClose
CloseBtn.pack(side=LEFT, padx=2, pady=2)
imgClear = Image.open(self.dir_os +'/assets/clear.png')
imgClear = imgClear.resize((20, 20), Image.ANTIALIAS)
imgClear = ImageTk.PhotoImage(imgClear)
ClearBtn = Button(myTool, image=imgClear, command=self.clear_consola)
ClearBtn.image = imgClear
ClearBtn.pack(side=LEFT, padx=2, pady=2)
imgAst = Image.open(self.dir_os +'/assets/ast.png')
imgAst = imgAst.resize((20, 20), Image.ANTIALIAS)
imgAst = ImageTk.PhotoImage(imgAst)
AstBtn = Button(myTool, image=imgAst, command=self.compilar_AST_pdf)
AstBtn.image = imgAst
AstBtn.pack(side=LEFT, padx=2, pady=2)
imgErrores = Image.open(self.dir_os +'/assets/error.png')
imgErrores = imgErrores.resize((20, 20), Image.ANTIALIAS)
imgErrores = ImageTk.PhotoImage(imgErrores)
ErroresBtn = Button(myTool, image=imgErrores, command=self.compilar_Error_pdf)
ErroresBtn.image = imgErrores
ErroresBtn.pack(side=LEFT, padx=2, pady=2)
imgGrammar = Image.open(self.dir_os +'/assets/grammar.png')
imgGrammar = imgGrammar.resize((20, 20), Image.ANTIALIAS)
imgGrammar = ImageTk.PhotoImage(imgGrammar)
GrammarBtn = Button(myTool, image=imgGrammar, command=self.compilar_grammar_pdf)
GrammarBtn.image = imgGrammar
GrammarBtn.pack(side=LEFT, padx=2, pady=2)
imgSimbolo = Image.open(self.dir_os +'/assets/simbolos.png')
imgSimbolo = imgSimbolo.resize((20, 20), Image.ANTIALIAS)
imgSimbolo = ImageTk.PhotoImage(imgSimbolo)
SimboloBtn = Button(myTool, image=imgSimbolo, command=self.compilar_ts_pdf)
SimboloBtn.image = imgSimbolo
SimboloBtn.pack(side=LEFT, padx=2, pady=2)
myTool.pack(side=TOP, fill=X)
def clear_consola(self):
limpiar_consola()
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.delete('1.0', END)
widget_item.insert(INSERT, get_contenido())
def create_consola(self):
tab_consola = ttk.Frame(self.tab_salida)
new_scroll = Scrollbar(tab_consola)
font_spec = ("Consolas", 11)
consola = Text(tab_consola, font=font_spec)
consola.pack(side=LEFT, fill=BOTH, expand=TRUE)
consola.config(yscrollcommand=new_scroll.set)
consola.insert(INSERT, contenido_consola)
new_scroll.pack(side=RIGHT, fill=Y)
new_scroll.config(command=consola.yview)
self.tab_salida.add(tab_consola, text="Consola")
self.tab_salida.pack(side=TOP, fill=BOTH, expand=TRUE)
    def add_tab(self, title):
        """Create a new editor tab: scrollbar + line-number gutter + Text
        editor, with the gutter scroll-locked to the editor.

        Child creation order (scrollbar, gutter, editor) is relied on by
        open_file/theme code, which index winfo_children().
        """
        new_tab = ttk.Frame(self.tabControl)
        font_spec = ("Consolas", 11)
        new_scroll = Scrollbar(new_tab)
        txt_numbers = self.set_line_numbers(new_tab)
        new_textarea = Text(new_tab, font=font_spec)
        # Scrollbar drag: move gutter and editor together.
        # NOTE(review): the first positional arg here receives Tk's action
        # string ('moveto'/'scroll'), not self; only 'moveto' events map
        # cleanly onto yview_moveto — confirm arrow/step scrolling works.
        def double_scroll(self, *args):
            txt_numbers.yview_moveto(*args)
            new_textarea.yview_moveto(*args)
        new_scroll.config(command=double_scroll)
        # Editor scroll: mirror the offset to the gutter and the scrollbar.
        def update_scroll(first, last):
            txt_numbers.yview_moveto(first)
            new_scroll.set(first, last)
        txt_numbers.configure(yscrollcommand=update_scroll)
        new_textarea.configure(yscrollcommand=update_scroll)
        # Keep the gutter's line numbers in sync while typing.
        new_textarea.bind('<Return>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number(txt_number, new_textarea))
        new_textarea.bind('<BackSpace>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number_back(txt_number, new_textarea))
        new_textarea.bind('<Control-Return>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number_back(txt_number, new_textarea))
        new_scroll.pack(side=RIGHT, fill=Y)
        new_textarea.pack(side=LEFT, fill=BOTH, expand=TRUE)
        self.update_line_number_back(txt_numbers, new_textarea)
        self.bind_shortcuts(new_textarea)
        self.color_font_config(new_textarea)
        self.tabControl.add(new_tab, text=title)
        # select the newly created tab (0-based index)
        self.tabControl.select(self.count_tabs - 1)
        self.tabControl.pack(side=TOP, fill=BOTH, expand=TRUE)
def create_status_bar(self):
self.my_status.set("Editor - 0.1")
font_spec = ("Consolas", 11)
label = Label(self.ventana, textvariable=self.my_status, fg="black", bg="lightgrey", anchor="sw", font=font_spec)
label.pack(side=BOTTOM, fill=BOTH)
def set_line_numbers(self, tab_item):
line_number = Text(tab_item, width=4)
line_number.pack(side=LEFT, fill=Y)
font_spec = ("Consolas", 11)
line_number.config(font=font_spec)
line_number.config(state=DISABLED)
line_number.config(background="#BBBDCC")
return line_number
#Funciones
def open_file(self, *args):
file_name = None
file_name = filedialog.askopenfilename(
defaultextension = ".txt",
filetypes = [("All Files", "*.*"),
("Text Files", "*.txt"),
("Python Scripts", "*.py")])
if file_name:
index_tab = self.tab_libre()
tab = None
if index_tab != -1:
self.tabControl.tab(index_tab, text=file_name)
tabs_list = self.tabControl.winfo_children()
tab = tabs_list[index_tab]
self.tabControl.select(index_tab)
else:
self.count_tabs +=1
self.add_tab(file_name)
tabs_list = self.tabControl.winfo_children()
tab = tabs_list[self.count_tabs - 1]
txt_box = None
widget_list = tab.winfo_children()
for widget_item in widget_list:
if isinstance(widget_item, Text):
txt_box = widget_item
with open(file_name, "r") as f:
txt_box.insert("1.0", f.read())
self.update_line_number_back(widget_list[1], widget_list[2])
self.color_font_config(widget_list[2])
def new_file(self, *args):
self.count_tabs +=1
self.add_tab("Untitled-"+str(self.count_tabs))
def tab_libre(self):
if self.count_tabs == 0:
return -1
tab_ideal = 0
tabs_list = self.tabControl.winfo_children()
for tab_item in tabs_list:
widget_list = tab_item.winfo_children()
for widget_item in widget_list:
if isinstance(widget_item, Text):
contenido_txt = widget_item.get("1.0",END)
if contenido_txt == "\n":
return tab_ideal
tab_ideal += 1
return -1
def delete_tab(self, *args):
for item in self.tabControl.winfo_children():
if str(item)==self.tabControl.select():
item.destroy()
self.count_tabs -= 1
return
def save_as(self, *args):
try:
new_file = filedialog.asksaveasfilename(
initialfile="Untitled-" + str(self.tabControl.index(CURRENT) + 1 ),
defaultextension = ".txt",
filetypes = [("All Files", "*.*"),
("Text Files", "*.txt"),
("Python Scripts", "*.py")])
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
contenido = txt_box.get(1.0, END)
with open(new_file, "w") as f:
f.write(contenido)
self.tabControl.tab(self.tabControl.index(CURRENT), text=new_file)
self.update_status_bar(0)
except Exception as er:
print(er)
def save_file(self, *args):
nombre_eval = "Untitled-"
actual_name = self.tabControl.tab(CURRENT, "text")
nombre_aux = actual_name[:-1]
if nombre_aux != nombre_eval:
try:
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
contenido = txt_box.get(1.0, END)
with open(actual_name, "w") as f:
f.write(contenido)
self.update_status_bar(0)
except Exception as er:
print(er)
else:
self.save_as()
def update_status_bar(self, *args):
if args[0] == 0:
self.my_status.set("Archivo Guardado con éxito")
else:
self.my_status.set("Editor - 0.1")
#Configuraciones
def bind_shortcuts(self, text_edit):
text_edit.bind('<Control-n>', self.new_file)
text_edit.bind('<Control-o>', self.open_file)
text_edit.bind('<Control-s>', self.save_file)
text_edit.bind('<Control-S>', self.save_as)
text_edit.bind('<Control-w>', self.delete_tab)
text_edit.bind('<Key>', self.update_status_bar)
text_edit.bind('<Key>', lambda event, txt_area=text_edit: self.color_font_config(text_edit))
def update_line_number(self, txt_number, txt_area):
txt_number.config(state=NORMAL)
txt_number.delete("1.0","end")
lineas = int(txt_area.index('end').split('.')[0])
for i in range(1, lineas+1):
line_print = str(i) + "\n"
txt_number.insert(INSERT, line_print)
txt_number.config(state=DISABLED)
def update_line_number_back(self, txt_number, txt_area):
txt_number.config(state=NORMAL)
txt_number.delete("1.0","end")
lineas = int(txt_area.index('end').split('.')[0]) -1
for i in range(1, lineas+1):
line_print = str(i) + "\n"
txt_number.insert(INSERT, line_print)
txt_number.config(state=DISABLED)
    def highlight_pattern(self, pattern, tag, txt_area, start="1.0", end="end", regexp=False, case_sensitive = 0):
        """Tag every occurrence of ``pattern`` in ``txt_area`` with ``tag``.

        Classic Tk search loop: the matchStart/matchEnd marks walk the
        widget while searchLimit bounds the scan. ``regexp`` enables Tcl
        regular expressions. NOTE(review): ``case_sensitive`` is passed
        straight to Tk's ``nocase`` option, so 1 actually means
        case-INsensitive — name kept for compatibility with callers.
        """
        start = txt_area.index(start)
        end = txt_area.index(end)
        txt_area.mark_set("matchStart", start)
        txt_area.mark_set("matchEnd", start)
        txt_area.mark_set("searchLimit", end)
        count = IntVar()
        while True:
            index = txt_area.search(pattern, "matchEnd", "searchLimit", count=count, regexp=regexp, nocase=case_sensitive)
            if index == "": break
            if count.get() == 0: break  # degenerate pattern which matches zero-length strings
            # advance the marks past this match and tag it
            txt_area.mark_set("matchStart", index)
            txt_area.mark_set("matchEnd", "%s+%sc" % (index, count.get()))
            txt_area.tag_add(tag, "matchStart", "matchEnd")
def default_theme(self):
for tab_item in self.tabControl.winfo_children():
conteo = 0
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
if conteo != 0:
widget_item.config(foreground="#000000")
widget_item.config(background="#FFFFFF")
conteo += 1
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.config(foreground="#000000")
widget_item.config(background="#FFFFFF")
def gray_theme(self):
for tab_item in self.tabControl.winfo_children():
conteo = 0
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
if conteo != 0:
widget_item.config(foreground="#000000")
widget_item.config(background="#BBBDCC")
conteo += 1
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.config(foreground="#000000")
widget_item.config(background="#BBBDCC")
def dark_night_theme(self):
for tab_item in self.tabControl.winfo_children():
conteo = 0
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
if conteo != 0:
widget_item.config(foreground="#FFFFFF")
widget_item.config(background="#252327")
conteo += 1
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.config(foreground="#FFFFFF")
widget_item.config(background="#252327")
def light_blue_theme(self):
for tab_item in self.tabControl.winfo_children():
conteo = 0
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
if conteo != 0:
widget_item.config(foreground="#000000")
widget_item.config(background="#4C57C8")
conteo += 1
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.config(foreground="#000000")
widget_item.config(background="#4C57C8")
def dark_theme(self):
for tab_item in self.tabControl.winfo_children():
conteo = 0
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
if conteo != 0:
widget_item.config(foreground="#FFFFFF")
widget_item.config(background="#2D163D")
conteo += 1
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.config(foreground="#FFFFFF")
widget_item.config(background="#2D163D")
def color_font_config(self, txt_area):
#Colores
txt_area.tag_config("reservada", foreground="#A675B9")
txt_area.tag_config("id", foreground="#759AF0")
txt_area.tag_config("string", foreground="#7AC883")
txt_area.tag_config("comentario", foreground="#9BA29C")
txt_area.tag_config("item", foreground="#A675B9")
txt_area.tag_config("important", foreground="#E7375C")
txt_area.tag_config("function", foreground="#5182C9")
txt_area.tag_config("boolean", foreground="#FA8C31")
#Palabras
self.highlight_pattern(r'/\*(.|\n)*?\*/', "comentario", txt_area, regexp=True)
self.highlight_pattern(r'--.*\n', "comentario", txt_area, regexp=True)
self.highlight_pattern("SELECT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("UPDATE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("WHERE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("JOIN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CREATE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DELETE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("COUNT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SUM", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FROM", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CASE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("THEN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ELSE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SMALLINT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INTEGER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("BIGINT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DECIMAL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("NUMERIC", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("REAL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("MONEY", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CHAR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CHARACTER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("VARYING", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TIMESTAMP", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("WITHOUT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("WITH", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TIME", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ZONE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DATE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INTERVAL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FIELDS", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("YEAR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("MONTH", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DAY", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("HOUR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("MINUTE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SECOND", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TO", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("BOOLEAN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("AS", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ENUM", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TYPE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("IS", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ISNULL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("NOTNULL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("NOT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("AND", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("OR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("BETWEEN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("LIKE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("IN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INLIKE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SIMILAR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("REPLACE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("MODE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("OWNER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("IF", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("EXISTS", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ALTER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DATABASE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("RENAME", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DROP", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TABLE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("PRIMARY", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FOREIGN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("KEY", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("REFERENCES", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CONSTRAINT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("CHECK", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SET", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INSERT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("BY", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("GROUP", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("HAVING", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ORDER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("WHEN", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("UNION", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("END", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("VALUES", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INTERSECT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("LIMIT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INNER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("LEFT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("RIGHT", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("OUTER", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ASC", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DESC", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("GREATEST", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("LEAST", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("OFFSET", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FIRST", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("LAST", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FULL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("ALL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("TRUE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("FALSE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("INHERITS", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("NULL", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("SHOW", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("DATABASES", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("USE", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("VARCHAR", "reservada", txt_area, case_sensitive=1)
self.highlight_pattern("==", "item", txt_area)
self.highlight_pattern("!=", "item", txt_area)
self.highlight_pattern(">=", "item", txt_area)
self.highlight_pattern("<=", "item", txt_area)
self.highlight_pattern(">", "item", txt_area)
self.highlight_pattern("<", "item", txt_area)
self.highlight_pattern("=", "item", txt_area)
self.highlight_pattern("+", "item", txt_area)
self.highlight_pattern("-", "item", txt_area)
self.highlight_pattern("*", "item", txt_area)
self.highlight_pattern("/", "item", txt_area)
self.highlight_pattern("self", "important", txt_area)
self.highlight_pattern("print", "function", txt_area)
self.highlight_pattern("true", "boolean", txt_area)
self.highlight_pattern("false", "boolean", txt_area)
self.highlight_pattern(r'(\".*?\")|(\'.*?\')', "string", txt_area, regexp=True)
    def graficar_AST(self, ast_):
        """Write the AST report (ast_reporte.dot) in Graphviz DOT syntax.

        ast_: list of parsed instructions; each exposes `.nodo` with `.num`
        (string node id), `.valor` (label) and, via graficar_AST_hijos,
        `.hijos` (children). Writes nothing when ast_ is empty.

        Synthetic spine node ids: count_nodos + 10000 labels the
        "Instrucciones" chain, count_nodos + 1000000 the per-instruction
        "Instruccion" node. The AST's own nodes use their `.num` ids.
        """
        if len(ast_) != 0:
            ast_str = 'digraph AST { \n node [shape=record];\n'
            count_nodos = 0
            for instruccion_ in ast_:
                if count_nodos != 0:
                    # Subsequent instructions hang off the previous spine
                    # node (count_nodos + 10000 - 1 is the prior
                    # "Instrucciones" node created on the last iteration).
                    ast_str += 'node' + str(count_nodos + 1000000) + '[label =\" Instruccion \"];\n'
                    ast_str += 'node' + str(count_nodos + 10000) + '[label =\" Instrucciones \"];\n'
                    ast_str += 'node' + str(count_nodos + 10000 - 1) + ' -> node' + str(count_nodos + 1000000) + ';\n'
                    ast_str += 'node' + str(count_nodos + 10000 - 1) + ' -> node' + str(count_nodos + 10000) + ';\n'
                    ast_str += 'node' + str(count_nodos + 1000000) + ' -> node' + instruccion_.nodo.num + ';\n'
                    ast_str += 'node' + instruccion_.nodo.num + '[label =\"'+ instruccion_.nodo.valor +"\"];\n"
                    ast_str += self.graficar_AST_hijos(instruccion_.nodo)
                else:
                    # First instruction: anchor both spine nodes to the
                    # implicit start node "start_ast".
                    ast_str += 'node' + instruccion_.nodo.num + '[label =\"'+ instruccion_.nodo.valor +"\"];\n"
                    ast_str += 'node' + str(count_nodos + 1000000) + '[label =\" Instruccion \"];\n'
                    ast_str += 'node' + str(count_nodos + 10000) + '[label =\" Instrucciones \"];\n'
                    ast_str += 'start_ast -> node' + str(count_nodos + 10000) + ';\n'
                    ast_str += 'start_ast -> node' + str(count_nodos + 1000000) + ';\n'
                    ast_str += 'node' + str(count_nodos + 1000000) + ' -> node' + instruccion_.nodo.num + ';\n'
                    ast_str += self.graficar_AST_hijos(instruccion_.nodo)
                count_nodos += 1
            ast_str += '\n}'
            with open('ast_reporte.dot', 'w', encoding='utf8') as f:
                f.write(ast_str)
def graficar_AST_hijos(self, instr_):
t = ''
for instruc in instr_.hijos:
t += 'node'+instruc.num+'[label=\"'+instruc.valor+'\"];\n'
t += 'node'+instr_.num + ' -> node' + instruc.num+';\n'
t += self.graficar_AST_hijos(instruc)
return t
def graficar_Gramatical(self, ast_):
if len(ast_) != 0:
grammar_str = 'digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n'
grammar_str += 'arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>PRODUCCIÓN</TD><TD>ACCIONES</TD></TR>\n'
grammar_str += '<TR><TD>INSTRUCCIONES ::= INSTRUCCION INSTRUCCIONES1</TD><TD>INSTRUCCIONES = INSTRUCCIONES1; INSTRUCCIONES.append(INSTRUCCION); </TD></TR>\n'
grammar_str += '<TR><TD>INSTRUCCIONES ::= </TD><TD>INSTRUCCIONES = [];</TD></TR>\n'
for instr in ast_:
grammar_str += instr.grammar_ + '\n'
grammar_str += '</TABLE>\n>, ];\n}'
with open('grammar_reporte.dot', 'w', encoding='utf8') as f:
f.write(grammar_str)
def graficar_Errores(self):
if len(errores) != 0:
reporte_errores = "digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
reporte_errores += "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
for error_ in errores:
reporte_errores += '<TR>'
reporte_errores += '<TD>' + error_.descripcion + '</TD>'
reporte_errores += '<TD>' + error_.valor +'</TD>'
reporte_errores += '<TD>' + error_.line +'</TD>'
reporte_errores += '<TD>' + error_.column +'</TD>'
reporte_errores += '</TR>\n'
reporte_errores += '</TABLE>\n>, ];\n}'
with open('errores_reporte.dot', 'w', encoding='utf8') as f:
f.write(reporte_errores)
def graficar_errores_lexicos(self):
if len(errores) != 0:
reporte_errores = "digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
reporte_errores += "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
for error_ in errores:
if error_.descripcion.lower() == "léxico":
reporte_errores += '<TR>'
reporte_errores += '<TD>' + error_.descripcion + '</TD>'
reporte_errores += '<TD>' + error_.valor +'</TD>'
reporte_errores += '<TD>' + error_.line +'</TD>'
reporte_errores += '<TD>' + error_.column +'</TD>'
reporte_errores += '</TR>'
reporte_errores += '</TABLE>\n>, ];\n}'
with open('lexico_reporte.dot', 'w', encoding='utf8') as f:
f.write(reporte_errores)
def graficar_errores_sintacticos(self):
if len(errores) != 0:
reporte_errores = "digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
reporte_errores += "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
for error_ in errores:
if error_.descripcion.lower() == "sintáctico":
reporte_errores += '<TR>'
reporte_errores += '<TD>' + error_.descripcion + '</TD>'
reporte_errores += '<TD>' + error_.valor +'</TD>'
reporte_errores += '<TD>' + error_.line +'</TD>'
reporte_errores += '<TD>' + error_.column +'</TD>'
reporte_errores += '</TR>'
reporte_errores += '</TABLE>\n>, ];\n}'
with open('sintactico_reporte.dot', 'w', encoding='utf8') as f:
f.write(reporte_errores)
def graficar_errores_semanticos(self):
if len(errores) != 0:
reporte_errores = "digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
reporte_errores += "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
for error_ in errores:
if error_.descripcion.lower() == "semántico":
reporte_errores += '<TR>'
reporte_errores += '<TD>' + error_.descripcion + '</TD>'
reporte_errores += '<TD>' + error_.valor +'</TD>'
reporte_errores += '<TD>' + error_.line +'</TD>'
reporte_errores += '<TD>' + error_.column +'</TD>'
reporte_errores += '</TR>'
reporte_errores += '</TABLE>\n>, ];\n}'
with open('semantico_reporte.dot', 'w', encoding='utf8') as f:
f.write(reporte_errores)
def graficar_TS(self):
reporte_ts = ts.reporte_ts()
with open('ts_reporte.dot', 'w', encoding='utf8') as f:
f.write(reporte_ts)
def compilar_ts_png(self):
img = Source.from_file("ts_reporte.dot", format = "png", encoding="utf8")
img.render()
entrada = self.popup_reporte_png(self.ventana, "ts_reporte.dot.png")
def compilar_ts_pdf(self):
file_pdf = Source.from_file("ts_reporte.dot", format = "pdf", encoding="utf8")
file_pdf.view()
def compilar_grammar_png(self):
img = Source.from_file("grammar_reporte.dot", format = "png", encoding="utf8")
img.render()
entrada = self.popup_reporte_png(self.ventana, "grammar_reporte.dot.png")
def compilar_grammar_pdf(self):
file_pdf = Source.from_file("grammar_reporte.dot", format = "pdf", encoding="utf8")
file_pdf.view()
def compilar_semantico_png(self):
img = Source.from_file("semantico_reporte.dot", format = "png", encoding='utf8')
img.render()
entrada = self.popup_reporte_png(self.ventana, "semantico_reporte.dot.png")
def compilar_semantico_pdf(self):
file_pdf = Source.from_file("semantico_reporte.dot", format = "pdf", encoding='utf8')
file_pdf.view()
def compilar_sintactico_png(self):
img = Source.from_file("sintactico_reporte.dot", format = "png", encoding='utf8')
img.render()
entrada = self.popup_reporte_png(self.ventana, "sintactico_reporte.dot.png")
def compilar_sintactico_pdf(self):
file_pdf = Source.from_file("sintactico_reporte.dot", format = "pdf", encoding='utf8')
file_pdf.view()
def compilar_lexico_png(self):
img = Source.from_file("lexico_reporte.dot", format = "png", encoding='utf8')
img.render()
entrada = self.popup_reporte_png(self.ventana, "lexico_reporte.dot.png")
def compilar_lexico_pdf(self):
file_pdf = Source.from_file("lexico_reporte.dot", format = "pdf", encoding='utf8')
file_pdf.view()
def compilar_Error_png(self):
img = Source.from_file("errores_reporte.dot", format="png", encoding='utf8')
img.render()
entrada = self.popup_reporte_png(self.ventana, "errores_reporte.dot.png")
def compilar_Error_pdf(self):
file_pdf = Source.from_file("errores_reporte.dot", format="pdf", encoding='utf8')
file_pdf.view()
def compilar_AST_png(self):
img = Source.from_file("ast_reporte.dot", format="png", encoding='utf8')
img.render()
entrada = self.popup_reporte_png(self.ventana, "ast_reporte.dot.png")
def compilar_AST_pdf(self):
file_pdf = Source.from_file("ast_reporte.dot", format="pdf", encoding='utf8')
file_pdf.view()
def popup_reporte_png(self, master, path):
top = self.top = Toplevel(master)
img = ImageTk.PhotoImage(Image.open(path))
panel = Label(top, image = img)
panel.image = img
panel.pack(side = "bottom", fill = "both", expand = "yes")
def ejecutar_codigo(self):
errores = []
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
contenido = txt_box.get(1.0, END)
instruccions = []
#try:
instruccions = gramatica.parse(contenido)
self.ejecutar_resultado(instruccions)
#except:
# if len(contenido) == 1:
# add_text("No hay código para ejecutar")
# else:
# add_text("Error al ejecutar el código")
#Imprimir consola
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.delete('1.0', END)
add_text("\nPS C:\\Users\\Grupo 23> ")
widget_item.insert(INSERT, get_contenido())
self.graficar_AST(instruccions)
self.graficar_Errores()
self.graficar_errores_lexicos()
self.graficar_errores_sintacticos()
self.graficar_errores_semanticos()
self.graficar_Gramatical(instruccions)
self.graficar_TS()
def ejecutar_resultado(self,instrucciones_):
for instruccion_ in instrucciones_:
instruccion_.ejecutar()
if __name__ == "__main__":
    # Launch the editor GUI only when run as a script (not on import).
    app = window()
    app.run()
| 46.461896 | 218 | 0.597784 | from tkinter import *
from tkinter import ttk
from tkinter import filedialog
from PIL import Image, ImageTk
import grammar.sql_grammar as gramatica
from graphviz import Source
from tools.console_text import *
from error.errores import *
from tools.tabla_simbolos import *
import os
class window:
def __init__(self):
self.ventana = Tk()
self.color = "darkgray"
self.dir_os = os.path.dirname(__file__)
self.count_tabs = 1
self.tabControl = ttk.Notebook(self.ventana,height=500)
self.tab_salida = ttk.Notebook(self.ventana, height=100)
self.my_status = StringVar()
self.ventana.title("Interfaz para compiladores")
self.ventana.geometry("900x700")
self.ventana.config(bg=self.color)
self.create_menu_bar()
self.create_tool_bar()
self.add_tab("Untitled-"+str(self.count_tabs))
self.create_consola()
self.create_status_bar()
self.search_text = ""
self.replace_text = ""
def run(self):
self.ventana.mainloop()
def create_menu_bar(self):
my_menu = Menu(self.ventana)
self.ventana.config(menu = my_menu)
file_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label="File", menu=file_menu)
edit_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label="Edit", menu=edit_menu)
tools_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label="Tools", menu=tools_menu)
options_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label="Options", menu=options_menu)
help_menu = Menu(my_menu, tearoff=0)
my_menu.add_cascade(label="Help", menu=help_menu)
file_menu.add_command(label="New File", command=self.new_file, accelerator="Ctrl+N")
file_menu.add_separator()
file_menu.add_command(label="Open File", command=self.open_file , accelerator="Ctrl+O")
file_menu.add_separator()
file_menu.add_command(label="Save File", command=self.save_file, accelerator="Ctrl+Shift-S")
file_menu.add_command(label="Save As", command=self.save_as, accelerator="Ctrl+S")
file_menu.add_separator()
file_menu.add_command(label="Close Tab", command=self.delete_tab, accelerator="Ctrl+W")
file_menu.add_command(label="Exit", command=self.ventana.quit)
edit_menu.add_command(label="Copy", command=lambda:self.ventana.focus_get().event_generate('<<Copy>>'))
edit_menu.add_command(label="Paste", command=lambda:self.ventana.focus_get().event_generate('<<Paste>>'))
edit_menu.add_command(label="Cut", command=lambda:self.ventana.focus_get().event_generate('<<Cut>>'))
edit_menu.add_separator()
edit_menu.add_command(label="Search", command=self.find_popup, accelerator="Ctrl+F")
edit_menu.add_command(label="Replace", command=self.replace_popup, accelerator="Ctrl+H")
tools_menu.add_command(label="Ejecutar", command=self.ejecutar_codigo, accelerator="F5")
tools_menu.add_separator()
tools_menu.add_command(label="AST", command = self.compilar_AST_pdf)
tools_menu.add_separator()
tools_menu.add_command(label = "Errores Lexicos", command = self.compilar_lexico_pdf)
tools_menu.add_command(label = "Errores Sintacticos", command = self.compilar_sintactico_pdf)
tools_menu.add_command(label = "Errores Semanticos", command = self.compilar_semantico_pdf)
tools_menu.add_command(label = "Todos los errores", command = self.compilar_Error_pdf)
tools_menu.add_separator()
tools_menu.add_command(label = "Reporte Gramatical", command = self.compilar_grammar_pdf)
tools_menu.add_separator()
tools_menu.add_command(label = "Tabla de Simbolos", command=self.compilar_ts_pdf)
theme_menu = Menu(options_menu, tearoff=0)
options_menu.add_cascade(label="Theme", menu=theme_menu)
theme_menu.add_command(label="Default", command=self.default_theme)
theme_menu.add_command(label="Light Gray", command=self.gray_theme)
theme_menu.add_command(label="Dark Night", command=self.dark_night_theme)
theme_menu.add_command(label="Light Blue", command=self.light_blue_theme)
theme_menu.add_command(label="Dark", command=self.dark_theme)
help_menu.add_command(label="Help")
help_menu.add_command(label="About", command=self.popup_about)
def popup_about(self):
popup = Tk()
popup.wm_title("About")
popup.geometry("330x190")
popup.resizable(False, False)
label = ttk.Label(popup, text="------------------ EDITOR COMPILADORES 2 ------------------", relief="sunken")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Versión: 1.51.1 (system setup)")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Confirmación: -----------------------------------")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Fecha: 2020-12-10T08:44:32")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Sistema Operativo: Windows_NT x64 10.0.18363")
label.pack(side="top", fill="x", pady=3)
label = ttk.Label(popup, text="Developer: Luis Fernando Arana Arias - 201700988\nPedro Rolando Ordoñez Carrillo - 201701187\nSteven Aaron Sis Hernandez - 201706357\nDavis Francisco Edward Enriquez - 201700972")
label.pack(side="top", fill="x", pady=3)
B1 = ttk.Button(popup, text="Close", command = popup.destroy)
B1.pack()
popup.mainloop()
def pop_alert(self, msg):
gui = Tk()
gui.title("Alerta")
gui.geometry("230x60")
gui.resizable(False,False)
label = ttk.Label(gui, text=msg)
label.pack(side="top", fill="x", pady=3)
B1 = ttk.Button(gui, text="Close", command = gui.destroy)
B1.pack()
gui.mainloop()
def find_popup(self):
popup = Tk()
popup.title("Buscar")
popup.geometry("250x30")
popup.resizable(False, False)
Label(popup, text="> ").pack(side=LEFT)
txtbox = Entry(popup)
txtbox.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox.focus_set()
Find = Button(popup, text ='Buscar')
Find.pack(side = LEFT)
def find_func():
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
text = txtbox.get()
txt_box.tag_remove('found', '1.0', END)
if (text):
idx = '1.0'
while 1:
idx = txt_box.search(text, idx, nocase = 1, stopindex = END)
if not idx: break
lastidx = '% s+% dc' % (idx, len(text))
txt_box.tag_add('found', idx, lastidx)
idx = lastidx
txt_box.tag_config('found', foreground='red', background ='#CACACA')
Find.config(command = find_func)
def replace_popup(self):
popup = Tk()
popup.title("Buscar y Remplazar")
popup.geometry("350x30")
popup.resizable(False, False)
Label(popup, text="> ").pack(side=LEFT)
txtbox = Entry(popup)
txtbox.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox.focus_set()
Label(popup, text="> ").pack(side=LEFT)
txtbox2 = Entry(popup)
txtbox2.pack(side = LEFT, fill = BOTH, expand = 1, pady=2)
txtbox2.focus_set()
Replace = Button(popup, text ='Remplazar')
Replace.pack(side = LEFT)
def replace_func():
tab_list = self.tabControl.winfo_children()
current_tab = tab_list[self.tabControl.index(CURRENT)]
txt_box = None
for widget_item in current_tab.winfo_children():
if isinstance(widget_item, Text):
txt_box = widget_item
text_find = txtbox.get()
text_replace = txtbox2.get()
txt_box.tag_remove('found', '1.0', END)
if (text_find):
idx = '1.0'
while 1:
idx = txt_box.search(text_find, idx, nocase = 1, stopindex = END)
if not idx: break
lastidx = '% s+% dc' % (idx, len(text_find))
txt_box.delete(idx, lastidx)
txt_box.insert(idx, text_replace)
lastidx = '% s+% dc' % (idx, len(text_replace))
txt_box.tag_add('found', idx, lastidx)
idx = lastidx
txt_box.tag_config('found', foreground ='green', background = 'yellow')
Replace.config(command = replace_func)
def create_tool_bar(self):
myTool = Frame(self.ventana)
imgOpen = Image.open(self.dir_os +'/assets/open.png')
imgOpen = imgOpen.resize((20,20), Image.ANTIALIAS)
imgOpen = ImageTk.PhotoImage(imgOpen)
OpenBtn = Button(myTool, image=imgOpen, command=self.open_file)
OpenBtn.image = imgOpen
OpenBtn.pack(side=LEFT, padx=2, pady=2)
imgFile = Image.open(self.dir_os +'/assets/file.png')
imgFile = imgFile.resize((20, 20), Image.ANTIALIAS)
imgFile = ImageTk.PhotoImage(imgFile)
FileBtn = Button(myTool, image=imgFile, command=self.new_file)
FileBtn.image = imgFile
FileBtn.pack(side=LEFT, padx=2, pady=2)
imgSave = Image.open(self.dir_os +'/assets/save.png')
imgSave = imgSave.resize((20, 20), Image.ANTIALIAS)
imgSave = ImageTk.PhotoImage(imgSave)
SaveBtn = Button(myTool, image=imgSave, command=self.save_as)
SaveBtn.image = imgSave
SaveBtn.pack(side=LEFT, padx=2, pady=2)
imgSearch = Image.open(self.dir_os +'/assets/search.png')
imgSearch = imgSearch.resize((20, 20), Image.ANTIALIAS)
imgSearch = ImageTk.PhotoImage(imgSearch)
SearchBtn = Button(myTool, image=imgSearch, command=self.find_popup)
SearchBtn.image = imgSearch
SearchBtn.pack(side=LEFT, padx=2, pady=2)
imgDebug = Image.open(self.dir_os +'/assets/debug.png')
imgDebug = imgDebug.resize((20, 20), Image.ANTIALIAS)
imgDebug = ImageTk.PhotoImage(imgDebug)
DebugBtn = Button(myTool, image=imgDebug, command=self.open_file)
DebugBtn.image = imgDebug
DebugBtn.pack(side=RIGHT, padx=2, pady=2)
imgExecute = Image.open(self.dir_os +'/assets/execute.png')
imgExecute = imgExecute.resize((20, 20), Image.ANTIALIAS)
imgExecute = ImageTk.PhotoImage(imgExecute)
ExecuteBtn = Button(myTool, image=imgExecute, command=self.ejecutar_codigo)
ExecuteBtn.image = imgExecute
ExecuteBtn.pack(side=RIGHT, padx=2, pady=2)
imgAbout = Image.open(self.dir_os +'/assets/about.png')
imgAbout = imgAbout.resize((20, 20), Image.ANTIALIAS)
imgAbout = ImageTk.PhotoImage(imgAbout)
AboutBtn = Button(myTool, image=imgAbout, command=self.popup_about)
AboutBtn.image = imgAbout
AboutBtn.pack(side=LEFT, padx=2, pady=2)
imgClose = Image.open(self.dir_os +'/assets/close.png')
imgClose = imgClose.resize((20, 20), Image.ANTIALIAS)
imgClose = ImageTk.PhotoImage(imgClose)
CloseBtn = Button(myTool, image=imgClose, command=self.delete_tab)
CloseBtn.image = imgClose
CloseBtn.pack(side=LEFT, padx=2, pady=2)
imgClear = Image.open(self.dir_os +'/assets/clear.png')
imgClear = imgClear.resize((20, 20), Image.ANTIALIAS)
imgClear = ImageTk.PhotoImage(imgClear)
ClearBtn = Button(myTool, image=imgClear, command=self.clear_consola)
ClearBtn.image = imgClear
ClearBtn.pack(side=LEFT, padx=2, pady=2)
imgAst = Image.open(self.dir_os +'/assets/ast.png')
imgAst = imgAst.resize((20, 20), Image.ANTIALIAS)
imgAst = ImageTk.PhotoImage(imgAst)
AstBtn = Button(myTool, image=imgAst, command=self.compilar_AST_pdf)
AstBtn.image = imgAst
AstBtn.pack(side=LEFT, padx=2, pady=2)
imgErrores = Image.open(self.dir_os +'/assets/error.png')
imgErrores = imgErrores.resize((20, 20), Image.ANTIALIAS)
imgErrores = ImageTk.PhotoImage(imgErrores)
ErroresBtn = Button(myTool, image=imgErrores, command=self.compilar_Error_pdf)
ErroresBtn.image = imgErrores
ErroresBtn.pack(side=LEFT, padx=2, pady=2)
imgGrammar = Image.open(self.dir_os +'/assets/grammar.png')
imgGrammar = imgGrammar.resize((20, 20), Image.ANTIALIAS)
imgGrammar = ImageTk.PhotoImage(imgGrammar)
GrammarBtn = Button(myTool, image=imgGrammar, command=self.compilar_grammar_pdf)
GrammarBtn.image = imgGrammar
GrammarBtn.pack(side=LEFT, padx=2, pady=2)
imgSimbolo = Image.open(self.dir_os +'/assets/simbolos.png')
imgSimbolo = imgSimbolo.resize((20, 20), Image.ANTIALIAS)
imgSimbolo = ImageTk.PhotoImage(imgSimbolo)
SimboloBtn = Button(myTool, image=imgSimbolo, command=self.compilar_ts_pdf)
SimboloBtn.image = imgSimbolo
SimboloBtn.pack(side=LEFT, padx=2, pady=2)
myTool.pack(side=TOP, fill=X)
def clear_consola(self):
limpiar_consola()
for tab_item in self.tab_salida.winfo_children():
for widget_item in tab_item.winfo_children():
if isinstance(widget_item, Text):
widget_item.delete('1.0', END)
widget_item.insert(INSERT, get_contenido())
def create_consola(self):
tab_consola = ttk.Frame(self.tab_salida)
new_scroll = Scrollbar(tab_consola)
font_spec = ("Consolas", 11)
consola = Text(tab_consola, font=font_spec)
consola.pack(side=LEFT, fill=BOTH, expand=TRUE)
consola.config(yscrollcommand=new_scroll.set)
consola.insert(INSERT, contenido_consola)
new_scroll.pack(side=RIGHT, fill=Y)
new_scroll.config(command=consola.yview)
self.tab_salida.add(tab_consola, text="Consola")
self.tab_salida.pack(side=TOP, fill=BOTH, expand=TRUE)
def add_tab(self, title):
    """Create a new editor tab: line-number gutter, text area and a shared
    vertical scrollbar, wired so gutter and editor scroll together.

    :param title: text shown on the notebook tab (file path or "Untitled-N").
    """
    new_tab = ttk.Frame(self.tabControl)
    font_spec = ("Consolas", 11)
    new_scroll = Scrollbar(new_tab)
    txt_numbers = self.set_line_numbers(new_tab)
    new_textarea = Text(new_tab, font=font_spec)
    # Scrollbar callback: forward the raw Tk scroll command to BOTH widgets.
    # The original declared `self` as the first parameter and called
    # yview_moveto(*args); that only worked by accident for "moveto" events
    # and raised TypeError for ("scroll", n, "units"/"pages") commands
    # produced by the scrollbar arrows/trough.  yview(*args) handles both.
    def double_scroll(*args):
        txt_numbers.yview(*args)
        new_textarea.yview(*args)
    new_scroll.config(command=double_scroll)
    # Keep gutter and scrollbar thumb in sync when the editor scrolls
    # (mouse wheel, cursor movement).
    def update_scroll(first, last):
        txt_numbers.yview_moveto(first)
        new_scroll.set(first, last)
    txt_numbers.configure(yscrollcommand=update_scroll)
    new_textarea.configure(yscrollcommand=update_scroll)
    # Refresh the gutter on edits; <Return> fires before the newline is
    # inserted, hence the separate "+1" variant (update_line_number).
    new_textarea.bind('<Return>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number(txt_number, new_textarea))
    new_textarea.bind('<BackSpace>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number_back(txt_number, new_textarea))
    new_textarea.bind('<Control-Return>', lambda event, txt_number=txt_numbers, txt_area=new_textarea: self.update_line_number_back(txt_number, new_textarea))
    new_scroll.pack(side=RIGHT, fill=Y)
    new_textarea.pack(side=LEFT, fill=BOTH, expand=TRUE)
    self.update_line_number_back(txt_numbers, new_textarea)
    self.bind_shortcuts(new_textarea)
    self.color_font_config(new_textarea)
    self.tabControl.add(new_tab, text=title)
    self.tabControl.select(self.count_tabs - 1)
    self.tabControl.pack(side=TOP, fill=BOTH, expand=TRUE)
def create_status_bar(self):
    """Create the bottom status bar bound to the self.my_status StringVar."""
    self.my_status.set("Editor - 0.1")
    status_label = Label(self.ventana, textvariable=self.my_status,
                         fg="black", bg="lightgrey", anchor="sw",
                         font=("Consolas", 11))
    status_label.pack(side=BOTTOM, fill=BOTH)
def set_line_numbers(self, tab_item):
    """Create and return the disabled line-number gutter Text for a tab."""
    gutter = Text(tab_item, width=4)
    gutter.pack(side=LEFT, fill=Y)
    gutter.config(font=("Consolas", 11))
    gutter.config(state=DISABLED)          # read-only: only code updates it
    gutter.config(background="#BBBDCC")
    return gutter
def open_file(self, *args):
    """Prompt for a file and load its contents into an editor tab.

    Reuses the first empty tab if one exists; otherwise creates a new one.
    The tab title becomes the chosen path (save_file relies on this).
    """
    file_name = None
    file_name = filedialog.askopenfilename(
        defaultextension = ".txt",
        filetypes = [("All Files", "*.*"),
        ("Text Files", "*.txt"),
        ("Python Scripts", "*.py")])
    if file_name:
        index_tab = self.tab_libre()
        tab = None
        if index_tab != -1:
            # Reuse an existing empty tab and retitle it with the path.
            self.tabControl.tab(index_tab, text=file_name)
            tabs_list = self.tabControl.winfo_children()
            tab = tabs_list[index_tab]
            self.tabControl.select(index_tab)
        else:
            self.count_tabs +=1
            self.add_tab(file_name)
            tabs_list = self.tabControl.winfo_children()
            tab = tabs_list[self.count_tabs - 1]
        txt_box = None
        widget_list = tab.winfo_children()
        for widget_item in widget_list:
            if isinstance(widget_item, Text):
                # Keeps the LAST Text child, i.e. the editor area (children
                # are created as Scrollbar, gutter Text, editor Text).
                txt_box = widget_item
        with open(file_name, "r") as f:
            txt_box.insert("1.0", f.read())
        # NOTE(review): indices 1 and 2 assume the widget-creation order in
        # add_tab (gutter at [1], editor at [2]) — confirm if that changes.
        self.update_line_number_back(widget_list[1], widget_list[2])
        self.color_font_config(widget_list[2])
def new_file(self, *args):
    """Open a fresh, empty "Untitled-N" tab (Ctrl+N handler)."""
    self.count_tabs += 1
    self.add_tab("Untitled-%d" % self.count_tabs)
def tab_libre(self):
    """Return the index of the first tab whose editor Text is empty, or -1."""
    if self.count_tabs == 0:
        return -1
    for position, tab_item in enumerate(self.tabControl.winfo_children()):
        for child in tab_item.winfo_children():
            # An empty Tk Text widget always holds a single trailing newline.
            if isinstance(child, Text) and child.get("1.0", END) == "\n":
                return position
    return -1
def delete_tab(self, *args):
    """Destroy the currently selected editor tab and update the tab count."""
    selected = self.tabControl.select()
    for item in self.tabControl.winfo_children():
        if str(item) == selected:
            item.destroy()
            self.count_tabs -= 1
            return
def save_as(self, *args):
    """Ask for a destination path, write the active tab's text there and
    retitle the tab with the chosen path."""
    try:
        destino = filedialog.asksaveasfilename(
            initialfile="Untitled-" + str(self.tabControl.index(CURRENT) + 1),
            defaultextension=".txt",
            filetypes=[("All Files", "*.*"),
                       ("Text Files", "*.txt"),
                       ("Python Scripts", "*.py")])
        current_tab = self.tabControl.winfo_children()[self.tabControl.index(CURRENT)]
        editor = None
        for child in current_tab.winfo_children():
            if isinstance(child, Text):
                editor = child  # keep the last Text child (the editor area)
        with open(destino, "w") as f:
            f.write(editor.get(1.0, END))
        self.tabControl.tab(self.tabControl.index(CURRENT), text=destino)
        self.update_status_bar(0)
    except Exception as er:
        # A cancelled dialog yields '' and open('') raises — report and keep
        # the UI alive (same best-effort behavior as before).
        print(er)
def save_file(self, *args):
    """Save the active tab (Ctrl+S handler).

    Untitled tabs are routed to "Save As"; tabs whose title is a real path
    are written in place.
    """
    actual_name = self.tabControl.tab(CURRENT, "text")
    # The original compared actual_name[:-1] against "Untitled-", which only
    # recognizes SINGLE-digit untitled tabs ("Untitled-12" was treated as a
    # file path and written to disk).  startswith() handles any tab number.
    if actual_name.startswith("Untitled-"):
        self.save_as()
    else:
        try:
            tab_list = self.tabControl.winfo_children()
            current_tab = tab_list[self.tabControl.index(CURRENT)]
            txt_box = None
            for widget_item in current_tab.winfo_children():
                if isinstance(widget_item, Text):
                    txt_box = widget_item  # last Text child = editor area
            contenido = txt_box.get(1.0, END)
            with open(actual_name, "w") as f:
                f.write(contenido)
            self.update_status_bar(0)
        except Exception as er:
            # Best effort: report and keep the UI alive (e.g. bad path).
            print(er)
def update_status_bar(self, *args):
    """Refresh the status-bar text.

    Called both directly — update_status_bar(0) after a save — and as a Tk
    event callback, where args[0] is an Event object (any non-0 value
    resets the default caption).
    """
    # Guard against an empty args tuple; the original raised IndexError.
    if args and args[0] == 0:
        self.my_status.set("Archivo Guardado con éxito")
    else:
        self.my_status.set("Editor - 0.1")
def bind_shortcuts(self, text_edit):
    """Install keyboard shortcuts and per-keystroke handlers on an editor
    Text widget."""
    text_edit.bind('<Control-n>', self.new_file)
    text_edit.bind('<Control-o>', self.open_file)
    text_edit.bind('<Control-s>', self.save_file)
    text_edit.bind('<Control-S>', self.save_as)
    text_edit.bind('<Control-w>', self.delete_tab)
    # The original bound '<Key>' twice; the second bind() replaced the
    # first, so update_status_bar never fired on typing.  add='+' chains
    # both handlers, restoring the evident intent (reset the "saved"
    # message and re-highlight on every keystroke).
    text_edit.bind('<Key>', self.update_status_bar)
    text_edit.bind('<Key>', lambda event, txt_area=text_edit: self.color_font_config(text_edit), add='+')
def update_line_number(self, txt_number, txt_area):
    """Rewrite the gutter as 1..N, where N counts txt_area's lines
    INCLUDING the one about to be created (used from the <Return> binding,
    which fires before Tk inserts the newline)."""
    txt_number.config(state=NORMAL)
    txt_number.delete("1.0", "end")
    total = int(txt_area.index('end').split('.')[0])
    numbers = ''.join('%d\n' % i for i in range(1, total + 1))
    txt_number.insert(INSERT, numbers)
    txt_number.config(state=DISABLED)
def update_line_number_back(self, txt_number, txt_area):
    """Rewrite the gutter as 1..N, where N is txt_area's current line count
    (Tk's 'end' index is one past the last line, hence the -1)."""
    txt_number.config(state=NORMAL)
    txt_number.delete("1.0", "end")
    total = int(txt_area.index('end').split('.')[0]) - 1
    numbers = ''.join('%d\n' % i for i in range(1, total + 1))
    txt_number.insert(INSERT, numbers)
    txt_number.config(state=DISABLED)
def highlight_pattern(self, pattern, tag, txt_area, start="1.0", end="end", regexp=False, case_sensitive = 0):
    """Tag every occurrence of *pattern* inside txt_area with *tag*.

    Uses Tk's Text.search in a loop between two moving marks.  `regexp`
    switches to Tcl regular-expression matching.  NOTE(review): the
    `case_sensitive` flag is forwarded to Tk's `nocase` option, so passing
    1 actually requests a case-INSENSITIVE search — the name is inverted,
    but every caller relies on this behavior, so it is kept as-is.
    """
    start = txt_area.index(start)
    end = txt_area.index(end)
    # Marks delimit the sliding search window: matchEnd advances past each
    # hit; searchLimit caps the scan.
    txt_area.mark_set("matchStart", start)
    txt_area.mark_set("matchEnd", start)
    txt_area.mark_set("searchLimit", end)
    count = IntVar()  # receives the character length of each match
    while True:
        index = txt_area.search(pattern, "matchEnd","searchLimit", count=count, regexp=regexp, nocase=case_sensitive)
        if index == "": break
        # Zero-length regex matches would never advance matchEnd — bail out.
        if count.get() == 0: break
        txt_area.mark_set("matchStart", index)
        txt_area.mark_set("matchEnd", "%s+%sc" % (index, count.get()))
        txt_area.tag_add(tag, "matchStart", "matchEnd")
def default_theme(self):
    """Switch the editor areas and the console to black-on-white."""
    fg, bg = "#000000", "#FFFFFF"
    def paint(widget):
        widget.config(foreground=fg)
        widget.config(background=bg)
    for tab_item in self.tabControl.winfo_children():
        text_seen = 0
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                if text_seen != 0:  # skip the first Text (line-number gutter)
                    paint(child)
                text_seen += 1
    for tab_item in self.tab_salida.winfo_children():
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                paint(child)
def gray_theme(self):
    """Switch the editor areas and the console to black-on-gray."""
    fg, bg = "#000000", "#BBBDCC"
    def paint(widget):
        widget.config(foreground=fg)
        widget.config(background=bg)
    for tab_item in self.tabControl.winfo_children():
        text_seen = 0
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                if text_seen != 0:  # skip the first Text (line-number gutter)
                    paint(child)
                text_seen += 1
    for tab_item in self.tab_salida.winfo_children():
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                paint(child)
def dark_night_theme(self):
    """Switch the editor areas and the console to white-on-near-black."""
    fg, bg = "#FFFFFF", "#252327"
    def paint(widget):
        widget.config(foreground=fg)
        widget.config(background=bg)
    for tab_item in self.tabControl.winfo_children():
        text_seen = 0
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                if text_seen != 0:  # skip the first Text (line-number gutter)
                    paint(child)
                text_seen += 1
    for tab_item in self.tab_salida.winfo_children():
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                paint(child)
def light_blue_theme(self):
    """Switch the editor areas and the console to black-on-blue."""
    fg, bg = "#000000", "#4C57C8"
    def paint(widget):
        widget.config(foreground=fg)
        widget.config(background=bg)
    for tab_item in self.tabControl.winfo_children():
        text_seen = 0
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                if text_seen != 0:  # skip the first Text (line-number gutter)
                    paint(child)
                text_seen += 1
    for tab_item in self.tab_salida.winfo_children():
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                paint(child)
def dark_theme(self):
    """Switch the editor areas and the console to white-on-dark-purple."""
    fg, bg = "#FFFFFF", "#2D163D"
    def paint(widget):
        widget.config(foreground=fg)
        widget.config(background=bg)
    for tab_item in self.tabControl.winfo_children():
        text_seen = 0
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                if text_seen != 0:  # skip the first Text (line-number gutter)
                    paint(child)
                text_seen += 1
    for tab_item in self.tab_salida.winfo_children():
        for child in tab_item.winfo_children():
            if isinstance(child, Text):
                paint(child)
def color_font_config(self, txt_area):
    """(Re)apply syntax-highlighting tags to an editor Text widget.

    Tag creation order fixes Tk tag priority, and highlight order is the
    same as the original hand-written sequence: comments first, then SQL
    keywords, operators, specials, and finally string literals.  The
    original body repeated ~100 near-identical highlight_pattern calls;
    data-driven loops keep the exact same patterns and order.
    """
    txt_area.tag_config("reservada", foreground="#A675B9")
    txt_area.tag_config("id", foreground="#759AF0")
    txt_area.tag_config("string", foreground="#7AC883")
    txt_area.tag_config("comentario", foreground="#9BA29C")
    txt_area.tag_config("item", foreground="#A675B9")
    txt_area.tag_config("important", foreground="#E7375C")
    txt_area.tag_config("function", foreground="#5182C9")
    txt_area.tag_config("boolean", foreground="#FA8C31")
    # Block and line comments.
    self.highlight_pattern(r'/\*(.|\n)*?\*/', "comentario", txt_area, regexp=True)
    self.highlight_pattern(r'--.*\n', "comentario", txt_area, regexp=True)
    # SQL keywords (case_sensitive=1 maps to Tk's nocase → case-insensitive).
    keywords = (
        "SELECT", "UPDATE", "WHERE", "JOIN", "CREATE", "DELETE", "COUNT",
        "SUM", "FROM", "CASE", "THEN", "ELSE", "SMALLINT", "INTEGER",
        "BIGINT", "DECIMAL", "NUMERIC", "REAL", "MONEY", "CHAR",
        "CHARACTER", "VARYING", "TIMESTAMP", "WITHOUT", "WITH", "TIME",
        "ZONE", "DATE", "INTERVAL", "FIELDS", "YEAR", "MONTH", "DAY",
        "HOUR", "MINUTE", "SECOND", "TO", "BOOLEAN", "AS", "ENUM", "TYPE",
        "IS", "ISNULL", "NOTNULL", "NOT", "AND", "OR", "BETWEEN", "LIKE",
        "IN", "INLIKE", "SIMILAR", "REPLACE", "MODE", "OWNER", "IF",
        "EXISTS", "ALTER", "DATABASE", "RENAME", "DROP", "TABLE",
        "PRIMARY", "FOREIGN", "KEY", "REFERENCES", "CONSTRAINT", "CHECK",
        "SET", "INSERT", "BY", "GROUP", "HAVING", "ORDER", "WHEN", "UNION",
        "END", "VALUES", "INTERSECT", "LIMIT", "INNER", "LEFT", "RIGHT",
        "OUTER", "ASC", "DESC", "GREATEST", "LEAST", "OFFSET", "FIRST",
        "LAST", "FULL", "ALL", "TRUE", "FALSE", "INHERITS", "NULL", "SHOW",
        "DATABASES", "USE", "VARCHAR",
    )
    for keyword in keywords:
        self.highlight_pattern(keyword, "reservada", txt_area, case_sensitive=1)
    # Operators (multi-character ones first, as in the original).
    for operator in ("==", "!=", ">=", "<=", ">", "<", "=", "+", "-", "*", "/"):
        self.highlight_pattern(operator, "item", txt_area)
    self.highlight_pattern("self", "important", txt_area)
    self.highlight_pattern("print", "function", txt_area)
    self.highlight_pattern("true", "boolean", txt_area)
    self.highlight_pattern("false", "boolean", txt_area)
    # String literals, single- or double-quoted.
    self.highlight_pattern(r'(\".*?\")|(\'.*?\')', "string", txt_area, regexp=True)
def graficar_AST(self, ast_):
    """Emit a Graphviz file ('ast_reporte.dot') for the parsed AST.

    Each top-level instruction hangs from a chain of synthetic
    "Instrucciones"/"Instruccion" nodes.  Synthetic node ids are derived
    from the instruction counter plus large offsets (10000 and 1000000) so
    they don't collide with parser-assigned node ids; this assumes real
    node ids stay below those offsets — TODO confirm against the parser.
    Does nothing when the AST is empty.
    """
    if len(ast_) != 0:
        ast_str = 'digraph AST { \n node [shape=record];\n'
        count_nodos = 0
        for instruccion_ in ast_:
            if count_nodos != 0:
                # Chain a new Instruccion/Instrucciones pair under the
                # previous iteration's Instrucciones node (10000 + i - 1).
                ast_str += 'node' + str(count_nodos + 1000000) + '[label =\" Instruccion \"];\n'
                ast_str += 'node' + str(count_nodos + 10000) + '[label =\" Instrucciones \"];\n'
                ast_str += 'node' + str(count_nodos + 10000 - 1) + ' -> node' + str(count_nodos + 1000000) + ';\n'
                ast_str += 'node' + str(count_nodos + 10000 - 1) + ' -> node' + str(count_nodos + 10000) + ';\n'
                ast_str += 'node' + str(count_nodos + 1000000) + ' -> node' + instruccion_.nodo.num + ';\n'
                ast_str += 'node' + instruccion_.nodo.num + '[label =\"'+ instruccion_.nodo.valor +"\"];\n"
                ast_str += self.graficar_AST_hijos(instruccion_.nodo)
            else:
                # First instruction hangs directly from the start node.
                ast_str += 'node' + instruccion_.nodo.num + '[label =\"'+ instruccion_.nodo.valor +"\"];\n"
                ast_str += 'node' + str(count_nodos + 1000000) + '[label =\" Instruccion \"];\n'
                ast_str += 'node' + str(count_nodos + 10000) + '[label =\" Instrucciones \"];\n'
                ast_str += 'start_ast -> node' + str(count_nodos + 10000) + ';\n'
                ast_str += 'start_ast -> node' + str(count_nodos + 1000000) + ';\n'
                ast_str += 'node' + str(count_nodos + 1000000) + ' -> node' + instruccion_.nodo.num + ';\n'
                ast_str += self.graficar_AST_hijos(instruccion_.nodo)
            count_nodos += 1
        ast_str += '\n}'
        with open('ast_reporte.dot', 'w', encoding='utf8') as f:
            f.write(ast_str)
def graficar_AST_hijos(self, instr_):
t = ''
for instruc in instr_.hijos:
t += 'node'+instruc.num+'[label=\"'+instruc.valor+'\"];\n'
t += 'node'+instr_.num + ' -> node' + instruc.num+';\n'
t += self.graficar_AST_hijos(instruc)
return t
def graficar_Gramatical(self, ast_):
    """Write 'grammar_reporte.dot': an HTML-like Graphviz table pairing each
    grammar production with its semantic actions, one row per parsed
    instruction (each instruction supplies its own row via .grammar_).
    Does nothing when the AST is empty."""
    if len(ast_) == 0:
        return
    parts = ['digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n']
    parts.append('arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>PRODUCCIÓN</TD><TD>ACCIONES</TD></TR>\n')
    parts.append('<TR><TD>INSTRUCCIONES ::= INSTRUCCION INSTRUCCIONES1</TD><TD>INSTRUCCIONES = INSTRUCCIONES1; INSTRUCCIONES.append(INSTRUCCION); </TD></TR>\n')
    parts.append('<TR><TD>INSTRUCCIONES ::= </TD><TD>INSTRUCCIONES = [];</TD></TR>\n')
    for instr in ast_:
        parts.append(instr.grammar_ + '\n')
    parts.append('</TABLE>\n>, ];\n}')
    with open('grammar_reporte.dot', 'w', encoding='utf8') as f:
        f.write(''.join(parts))
def graficar_Errores(self):
    """Write 'errores_reporte.dot': a Graphviz HTML-like table with every
    recorded error (type, description, line, column).  Skipped when the
    shared `errores` list is empty."""
    if not errores:
        return
    filas = []
    for err in errores:
        filas.append('<TR>' + '<TD>' + err.descripcion + '</TD>' + '<TD>' + err.valor + '</TD>' + '<TD>' + err.line + '</TD>' + '<TD>' + err.column + '</TD>' + '</TR>\n')
    reporte = ("digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
               "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
               + ''.join(filas)
               + '</TABLE>\n>, ];\n}')
    with open('errores_reporte.dot', 'w', encoding='utf8') as f:
        f.write(reporte)
def graficar_errores_lexicos(self):
    """Write 'lexico_reporte.dot' listing only the lexical errors from the
    shared `errores` list.  Skipped when the list is empty."""
    if not errores:
        return
    filas = []
    for err in errores:
        if err.descripcion.lower() == "léxico":
            filas.append('<TR>' + '<TD>' + err.descripcion + '</TD>' + '<TD>' + err.valor + '</TD>' + '<TD>' + err.line + '</TD>' + '<TD>' + err.column + '</TD>' + '</TR>')
    reporte = ("digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
               "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
               + ''.join(filas)
               + '</TABLE>\n>, ];\n}')
    with open('lexico_reporte.dot', 'w', encoding='utf8') as f:
        f.write(reporte)
def graficar_errores_sintacticos(self):
    """Write 'sintactico_reporte.dot' listing only the syntax errors from
    the shared `errores` list.  Skipped when the list is empty."""
    if not errores:
        return
    filas = []
    for err in errores:
        if err.descripcion.lower() == "sintáctico":
            filas.append('<TR>' + '<TD>' + err.descripcion + '</TD>' + '<TD>' + err.valor + '</TD>' + '<TD>' + err.line + '</TD>' + '<TD>' + err.column + '</TD>' + '</TR>')
    reporte = ("digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
               "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
               + ''.join(filas)
               + '</TABLE>\n>, ];\n}')
    with open('sintactico_reporte.dot', 'w', encoding='utf8') as f:
        f.write(reporte)
def graficar_errores_semanticos(self):
    """Write 'semantico_reporte.dot' listing only the semantic errors from
    the shared `errores` list.  Skipped when the list is empty."""
    if not errores:
        return
    filas = []
    for err in errores:
        if err.descripcion.lower() == "semántico":
            filas.append('<TR>' + '<TD>' + err.descripcion + '</TD>' + '<TD>' + err.valor + '</TD>' + '<TD>' + err.line + '</TD>' + '<TD>' + err.column + '</TD>' + '</TR>')
    reporte = ("digraph test {\ngraph [ratio=fill];\nnode [label=\"\\N\", fontsize=15, shape=plaintext];\ngraph [bb=\"0,0,352,154\"];\n"
               "arset [label=<\n<TABLE ALIGN=\"LEFT\">\n<TR>\n<TD>Tipo Error</TD>\n<TD>Descripcion</TD>\n<TD>Linea</TD>\n<TD>Columna</TD>\n</TR>\n"
               + ''.join(filas)
               + '</TABLE>\n>, ];\n}')
    with open('semantico_reporte.dot', 'w', encoding='utf8') as f:
        f.write(reporte)
def graficar_TS(self):
    """Dump the symbol table (rendered by ts.reporte_ts) to 'ts_reporte.dot'."""
    with open('ts_reporte.dot', 'w', encoding='utf8') as f:
        f.write(ts.reporte_ts())
def compilar_ts_png(self):
    """Render the symbol-table report to PNG and preview it in a popup."""
    graph = Source.from_file("ts_reporte.dot", format="png", encoding="utf8")
    graph.render()  # writes ts_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "ts_reporte.dot.png")
def compilar_ts_pdf(self):
    """Render the symbol-table report to PDF and open it in the OS viewer."""
    documento = Source.from_file("ts_reporte.dot", format="pdf", encoding="utf8")
    documento.view()
def compilar_grammar_png(self):
    """Render the grammar report to PNG and preview it in a popup."""
    graph = Source.from_file("grammar_reporte.dot", format="png", encoding="utf8")
    graph.render()  # writes grammar_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "grammar_reporte.dot.png")
def compilar_grammar_pdf(self):
    """Render the grammar report to PDF and open it in the OS viewer."""
    documento = Source.from_file("grammar_reporte.dot", format="pdf", encoding="utf8")
    documento.view()
def compilar_semantico_png(self):
    """Render the semantic-error report to PNG and preview it in a popup."""
    graph = Source.from_file("semantico_reporte.dot", format="png", encoding='utf8')
    graph.render()  # writes semantico_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "semantico_reporte.dot.png")
def compilar_semantico_pdf(self):
    """Render the semantic-error report to PDF and open it in the OS viewer."""
    documento = Source.from_file("semantico_reporte.dot", format="pdf", encoding='utf8')
    documento.view()
def compilar_sintactico_png(self):
    """Render the syntax-error report to PNG and preview it in a popup."""
    graph = Source.from_file("sintactico_reporte.dot", format="png", encoding='utf8')
    graph.render()  # writes sintactico_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "sintactico_reporte.dot.png")
def compilar_sintactico_pdf(self):
    """Render the syntax-error report to PDF and open it in the OS viewer."""
    documento = Source.from_file("sintactico_reporte.dot", format="pdf", encoding='utf8')
    documento.view()
def compilar_lexico_png(self):
    """Render the lexical-error report to PNG and preview it in a popup."""
    graph = Source.from_file("lexico_reporte.dot", format="png", encoding='utf8')
    graph.render()  # writes lexico_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "lexico_reporte.dot.png")
def compilar_lexico_pdf(self):
    """Render the lexical-error report to PDF and open it in the OS viewer."""
    documento = Source.from_file("lexico_reporte.dot", format="pdf", encoding='utf8')
    documento.view()
def compilar_Error_png(self):
    """Render the combined error report to PNG and preview it in a popup."""
    graph = Source.from_file("errores_reporte.dot", format="png", encoding='utf8')
    graph.render()  # writes errores_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "errores_reporte.dot.png")
def compilar_Error_pdf(self):
    """Render the combined error report to PDF and open it in the OS viewer."""
    documento = Source.from_file("errores_reporte.dot", format="pdf", encoding='utf8')
    documento.view()
def compilar_AST_png(self):
    """Render the AST report to PNG and preview it in a popup."""
    graph = Source.from_file("ast_reporte.dot", format="png", encoding='utf8')
    graph.render()  # writes ast_reporte.dot.png next to the .dot file
    self.popup_reporte_png(self.ventana, "ast_reporte.dot.png")
def compilar_AST_pdf(self):
    """Render the AST report to PDF and open it in the OS viewer."""
    documento = Source.from_file("ast_reporte.dot", format="pdf", encoding='utf8')
    documento.view()
def popup_reporte_png(self, master, path):
    """Open a Toplevel window previewing the PNG image at *path*."""
    top = self.top = Toplevel(master)
    photo = ImageTk.PhotoImage(Image.open(path))
    viewer = Label(top, image=photo)
    viewer.image = photo  # keep a reference so Tk doesn't garbage-collect it
    viewer.pack(side="bottom", fill="both", expand="yes")
def ejecutar_codigo(self):
    """Parse and run the source in the active tab, refresh the console and
    regenerate every report (.dot files)."""
    # Reset the shared error list before a new run.  The original wrote
    # `errores = []`, which only rebound a LOCAL name and left the
    # module-level list (read by the graficar_* reports) untouched, so
    # errors accumulated across runs.  Mutating in place clears the list
    # every reader sees.  (Assumes `errores` is the module-level list used
    # by the report methods — confirm against the file header.)
    del errores[:]
    tab_list = self.tabControl.winfo_children()
    current_tab = tab_list[self.tabControl.index(CURRENT)]
    txt_box = None
    for widget_item in current_tab.winfo_children():
        if isinstance(widget_item, Text):
            txt_box = widget_item  # last Text child = editor area
    contenido = txt_box.get(1.0, END)
    instruccions = gramatica.parse(contenido)
    self.ejecutar_resultado(instruccions)
    # Refresh the console tab with the interpreter output plus a prompt.
    for tab_item in self.tab_salida.winfo_children():
        for widget_item in tab_item.winfo_children():
            if isinstance(widget_item, Text):
                widget_item.delete('1.0', END)
                add_text("\nPS C:\\Users\\Grupo 23> ")
                widget_item.insert(INSERT, get_contenido())
    # Regenerate every Graphviz report for this run.
    self.graficar_AST(instruccions)
    self.graficar_Errores()
    self.graficar_errores_lexicos()
    self.graficar_errores_sintacticos()
    self.graficar_errores_semanticos()
    self.graficar_Gramatical(instruccions)
    self.graficar_TS()
def ejecutar_resultado(self, instrucciones_):
    """Run every parsed instruction in order (interpreter entry point)."""
    for instruccion in instrucciones_:
        instruccion.ejecutar()
if __name__ == "__main__":
    # Launch the IDE main loop.
    app = window()
    app.run()
| true | true |
1c38434c1e88f2c41b796918c5a9fbb25223104e | 3,558 | py | Python | src/cnn_text/exp10.py | numb3r33/dl_nlp | 3b28b883de08efaa2eec4bb3d976906c94b926d6 | [
"MIT"
] | null | null | null | src/cnn_text/exp10.py | numb3r33/dl_nlp | 3b28b883de08efaa2eec4bb3d976906c94b926d6 | [
"MIT"
] | 11 | 2020-03-24T16:56:30.000Z | 2022-03-11T23:45:08.000Z | src/cnn_text/exp10.py | numb3r33/dl_nlp | 3b28b883de08efaa2eec4bb3d976906c94b926d6 | [
"MIT"
] | null | null | null | import pandas as pd
import numpy as np
import os
import argparse
from utils import *
from train import *
from losses import *
from models import *
SAMPLE_BASEPATH = '../../data/jigsaw_toxic/processed/'
BASEPATH = '../../data/jigsaw_toxic/raw/'
SUB_PATH = '../../submissions/'
MODEL_FP = '../../models/'
SUB_FN = 'exp10_cnn.csv'
def main(exp_name, run_mode, is_sample):
if run_mode == 'train':
if is_sample:
train = read_csv(os.path.join(SAMPLE_BASEPATH, 'train_sample.csv'))
else:
train = read_csv(os.path.join(BASEPATH, 'train.csv'))
TARGET_COLS = get_target_cols()
train_tokenized_comments = get_tokenized_comments(train.comment_text)
train['tokenized_comments'] = get_tokenized_comments_string(train.comment_text)
words = get_vocab(train_tokenized_comments, exp_name)
#embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_glove_embedding_matrix(words)
embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_wv_embedding_matrix(words)
token_to_id = get_token_to_id(words, UNK, PAD, UNK_IX, PAD_IX)
print('Vocabulary |V|: {}'.format(len(token_to_id)))
data_train, data_val = get_train_test_splits(train, exp_name)
model = get_exp10_model(embedding_matrix, token_to_id, exp_name, PAD_IX)
criterion = get_exp2_criterion()
optimizer = get_exp2_optimizer(model, exp_name)
model, preds = train_and_evaluate(model, criterion, optimizer, embedding_matrix, token_to_id, exp_name, data_train, data_val, TARGET_COLS, UNK_IX, PAD_IX, run_mode)
else:
train = read_csv(os.path.join(BASEPATH, 'train.csv'))
test = read_csv(os.path.join(BASEPATH, 'test.csv'))
test_labels = read_csv(os.path.join(BASEPATH, 'test_labels.csv'))
TARGET_COLS = get_target_cols()
train_tokenized_comments = get_tokenized_comments(train.comment_text)
train['tokenized_comments'] = get_tokenized_comments_string(train.comment_text)
test['tokenized_comments'] = get_tokenized_comments_string(test.comment_text)
words = get_vocab(train_tokenized_comments, exp_name)
#embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_glove_embedding_matrix(words)
embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_wv_embedding_matrix(words)
token_to_id = get_token_to_id(words, UNK, PAD, UNK_IX, PAD_IX)
model = get_exp10_model(embedding_matrix, token_to_id, exp_name, PAD_IX)
criterion = get_exp2_criterion()
optimizer = get_exp2_optimizer(model, exp_name)
model, preds = train_and_evaluate(model, criterion, optimizer, embedding_matrix, token_to_id, exp_name, train, test, TARGET_COLS, UNK_IX, PAD_IX, run_mode)
prepare_submission(test_labels, preds, os.path.join(SUB_PATH, SUB_FN))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='CNN for Text Classification ( Kaggle Jigsaw Toxic Comments Classification )')
parser.add_argument('-run_mode', type=str, help='Run the model in train or eval mode')
parser.add_argument('-exp_name', type=str, help='Name of the Experiment')
parser.add_argument('-is_sample', type=bool, help='Do you want to run the experiment on the sample?')
args = parser.parse_args()
run_mode = args.run_mode
exp_name = args.exp_name
is_sample = args.is_sample
main(exp_name, run_mode, is_sample)
| 40.431818 | 172 | 0.68606 | import pandas as pd
import numpy as np
import os
import argparse
from utils import *
from train import *
from losses import *
from models import *
SAMPLE_BASEPATH = '../../data/jigsaw_toxic/processed/'
BASEPATH = '../../data/jigsaw_toxic/raw/'
SUB_PATH = '../../submissions/'
MODEL_FP = '../../models/'
SUB_FN = 'exp10_cnn.csv'
def main(exp_name, run_mode, is_sample):
if run_mode == 'train':
if is_sample:
train = read_csv(os.path.join(SAMPLE_BASEPATH, 'train_sample.csv'))
else:
train = read_csv(os.path.join(BASEPATH, 'train.csv'))
TARGET_COLS = get_target_cols()
train_tokenized_comments = get_tokenized_comments(train.comment_text)
train['tokenized_comments'] = get_tokenized_comments_string(train.comment_text)
words = get_vocab(train_tokenized_comments, exp_name)
embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_wv_embedding_matrix(words)
token_to_id = get_token_to_id(words, UNK, PAD, UNK_IX, PAD_IX)
print('Vocabulary |V|: {}'.format(len(token_to_id)))
data_train, data_val = get_train_test_splits(train, exp_name)
model = get_exp10_model(embedding_matrix, token_to_id, exp_name, PAD_IX)
criterion = get_exp2_criterion()
optimizer = get_exp2_optimizer(model, exp_name)
model, preds = train_and_evaluate(model, criterion, optimizer, embedding_matrix, token_to_id, exp_name, data_train, data_val, TARGET_COLS, UNK_IX, PAD_IX, run_mode)
else:
train = read_csv(os.path.join(BASEPATH, 'train.csv'))
test = read_csv(os.path.join(BASEPATH, 'test.csv'))
test_labels = read_csv(os.path.join(BASEPATH, 'test_labels.csv'))
TARGET_COLS = get_target_cols()
train_tokenized_comments = get_tokenized_comments(train.comment_text)
train['tokenized_comments'] = get_tokenized_comments_string(train.comment_text)
test['tokenized_comments'] = get_tokenized_comments_string(test.comment_text)
words = get_vocab(train_tokenized_comments, exp_name)
embedding_matrix, UNK, PAD, UNK_IX, PAD_IX = load_wv_embedding_matrix(words)
token_to_id = get_token_to_id(words, UNK, PAD, UNK_IX, PAD_IX)
model = get_exp10_model(embedding_matrix, token_to_id, exp_name, PAD_IX)
criterion = get_exp2_criterion()
optimizer = get_exp2_optimizer(model, exp_name)
model, preds = train_and_evaluate(model, criterion, optimizer, embedding_matrix, token_to_id, exp_name, train, test, TARGET_COLS, UNK_IX, PAD_IX, run_mode)
prepare_submission(test_labels, preds, os.path.join(SUB_PATH, SUB_FN))
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='CNN for Text Classification ( Kaggle Jigsaw Toxic Comments Classification )')
parser.add_argument('-run_mode', type=str, help='Run the model in train or eval mode')
parser.add_argument('-exp_name', type=str, help='Name of the Experiment')
parser.add_argument('-is_sample', type=bool, help='Do you want to run the experiment on the sample?')
args = parser.parse_args()
run_mode = args.run_mode
exp_name = args.exp_name
is_sample = args.is_sample
main(exp_name, run_mode, is_sample)
| true | true |
1c38437f64fca9bd28d1e4f5a7c12568c004f30a | 909 | py | Python | receiver.py | lobdellb/brycemodem | d95431e93b54b08d07ebefc7b64b8c89764a4f12 | [
"MIT"
] | null | null | null | receiver.py | lobdellb/brycemodem | d95431e93b54b08d07ebefc7b64b8c89764a4f12 | [
"MIT"
] | null | null | null | receiver.py | lobdellb/brycemodem | d95431e93b54b08d07ebefc7b64b8c89764a4f12 | [
"MIT"
] | null | null | null |
import pyaudio
import wave
import time
import sys

# Play back a WAV file through PyAudio's callback (non-blocking) API.

if len(sys.argv) < 2:
    print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
    sys.exit(-1)

wav_file = wave.open(sys.argv[1], 'rb')
audio = pyaudio.PyAudio()


def callback(in_data, frame_count, time_info, status):
    # Supply the next chunk of frames; PyAudio invokes this on its own thread.
    print(frame_count)
    return (wav_file.readframes(frame_count), pyaudio.paContinue)


# Output stream configured from the WAV header (width/channels/rate).
out_stream = audio.open(
    format=audio.get_format_from_width(wav_file.getsampwidth()),
    channels=wav_file.getnchannels(),
    rate=wav_file.getframerate(),
    output=True,
    stream_callback=callback,
)

out_stream.start_stream()

# Poll until the callback has drained the file and playback finishes.
while out_stream.is_active():
    time.sleep(0.1)

# Tear down in reverse order of acquisition.
out_stream.stop_stream()
out_stream.close()
wav_file.close()
audio.terminate()
import pyaudio
import wave
import time
import sys
# Require the WAV file path as the first CLI argument.
if len(sys.argv) < 2:
    print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0])
    sys.exit(-1)
wf = wave.open(sys.argv[1], 'rb')
p = pyaudio.PyAudio()
# PyAudio callback: feed the next chunk of frames from the WAV file.
def callback(in_data, frame_count, time_info, status):
    print(frame_count)
    data = wf.readframes(frame_count)
    return (data, pyaudio.paContinue)
# Output stream configured from the WAV header; callback mode (non-blocking).
stream = p.open(format=p.get_format_from_width(wf.getsampwidth()),
                channels=wf.getnchannels(),
                rate=wf.getframerate(),
                output=True,
                stream_callback=callback)
stream.start_stream()
# Poll until playback drains; the callback runs on PyAudio's own thread.
while stream.is_active():
    time.sleep(0.1)
# Release audio resources in reverse order of acquisition.
stream.stop_stream()
stream.close()
wf.close()
p.terminate()
p.terminate() | true | true |
1c384397f9a6a26b3987220dac9d9957db3815cd | 2,482 | py | Python | themes/prompt-toolkit/base16/base16-cupertino.py | base16-fork/base16-fork | 79856b7e6195dde0874a9e6d191101ac6c5c74f5 | [
"MIT"
] | null | null | null | themes/prompt-toolkit/base16/base16-cupertino.py | base16-fork/base16-fork | 79856b7e6195dde0874a9e6d191101ac6c5c74f5 | [
"MIT"
] | null | null | null | themes/prompt-toolkit/base16/base16-cupertino.py | base16-fork/base16-fork | 79856b7e6195dde0874a9e6d191101ac6c5c74f5 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# base16-prompt-toolkit (https://github.com/memeplex/base16-prompt-toolkit)
# Base16 Prompt Toolkit template by Carlos Pita (carlosjosepita@gmail.com
# Cupertino scheme by Defman21
try:
    # prompt_toolkit older than v2 exposes the 256-color table here.
    from prompt_toolkit.output.vt100 import _256_colors
except ModuleNotFoundError:
    # prompt_toolkit v2 moved it here.
    from prompt_toolkit.formatted_text.ansi import _256_colors
from pygments.style import Style
from pygments.token import (Keyword, Name, Comment, String, Error, Text,
                            Number, Operator, Literal, Token)
# Base16 palette (Cupertino scheme). See
# http://chriskempson.com/projects/base16/ for the role of each slot.
base00 = '#ffffff'
base01 = '#c0c0c0'
base02 = '#c0c0c0'
base03 = '#808080'
base04 = '#808080'
base05 = '#404040'
base06 = '#404040'
base07 = '#5e5e5e'
base08 = '#c41a15'
base09 = '#eb8500'
base0A = '#826b28'
base0B = '#007400'
base0C = '#318495'
base0D = '#0000ff'
base0E = '#a90d91'
base0F = '#826b28'
# Remap the terminal's 16 ANSI color slots to the palette above by patching
# prompt_toolkit's RGB->index table. See
# https://github.com/jonathanslenders/python-prompt-toolkit/issues/355
colors = (globals()['base0' + d] for d in '08BADEC5379F1246')
for i, color in enumerate(colors):
    # Split '#rrggbb' into integer channels.
    r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16)
    _256_colors[r, g, b] = i + 6 if i > 8 else i
# See http://pygments.org/docs/tokens/ for a description of the different
# pygments tokens.
class Base16Style(Style):
    """Pygments style mapping token types onto the base16 palette.

    See http://pygments.org/docs/tokens/ for the meaning of each token.
    """

    background_color = base00
    highlight_color = base02
    default_style = base05

    # Syntax-highlighting rules for pygments tokens.
    styles = {
        Text: base05,
        Error: '%s bold' % base08,
        Comment: base03,
        Keyword: base0E,
        Keyword.Constant: base09,
        Keyword.Namespace: base0D,
        Name.Builtin: base0D,
        Name.Function: base0D,
        Name.Class: base0D,
        Name.Decorator: base0E,
        Name.Exception: base08,
        Number: base09,
        Operator: base0E,
        Literal: base0B,
        String: base0B
    }

    # prompt_toolkit UI pseudo-tokens (prompts, completion menu, brackets); see
    # prompt_toolkit/styles/defaults.py for the full list of pseudo-tokens.
    overrides = {
        Token.Prompt: base0B,
        Token.PromptNum: '%s bold' % base0B,
        Token.OutPrompt: base08,
        Token.OutPromptNum: '%s bold' % base08,
        Token.Menu.Completions.Completion: 'bg:%s %s' % (base01, base04),
        Token.Menu.Completions.Completion.Current: 'bg:%s %s' % (base04, base01),
        Token.MatchingBracket.Other: 'bg:%s %s' % (base03, base00)
    }
| 29.903614 | 109 | 0.672039 |
try:
    # prompt_toolkit older than v2 exposes the 256-color table here.
    from prompt_toolkit.output.vt100 import _256_colors
except ModuleNotFoundError:
    # prompt_toolkit v2 moved it here.
    from prompt_toolkit.formatted_text.ansi import _256_colors
from pygments.style import Style
from pygments.token import (Keyword, Name, Comment, String, Error, Text,
                            Number, Operator, Literal, Token)
# Base16 palette (Cupertino scheme); slot roles per the base16 spec.
base00 = '#ffffff'
base01 = '#c0c0c0'
base02 = '#c0c0c0'
base03 = '#808080'
base04 = '#808080'
base05 = '#404040'
base06 = '#404040'
base07 = '#5e5e5e'
base08 = '#c41a15'
base09 = '#eb8500'
base0A = '#826b28'
base0B = '#007400'
base0C = '#318495'
base0D = '#0000ff'
base0E = '#a90d91'
base0F = '#826b28'
# Remap the 16 ANSI color slots by patching prompt_toolkit's RGB->index table.
colors = (globals()['base0' + d] for d in '08BADEC5379F1246')
for i, color in enumerate(colors):
    # Split '#rrggbb' into integer channels.
    r, g, b = int(color[1:3], 16), int(color[3:5], 16), int(color[5:], 16)
    _256_colors[r, g, b] = i + 6 if i > 8 else i
_256_colors[r, g, b] = i + 6 if i > 8 else i
class Base16Style(Style):
    """Pygments style mapping token types onto the base16 palette."""

    background_color = base00
    highlight_color = base02
    default_style = base05

    # Syntax-highlighting rules for pygments tokens.
    styles = {
        Text: base05,
        Error: '%s bold' % base08,
        Comment: base03,
        Keyword: base0E,
        Keyword.Constant: base09,
        Keyword.Namespace: base0D,
        Name.Builtin: base0D,
        Name.Function: base0D,
        Name.Class: base0D,
        Name.Decorator: base0E,
        Name.Exception: base08,
        Number: base09,
        Operator: base0E,
        Literal: base0B,
        String: base0B
    }

    # prompt_toolkit UI pseudo-tokens (prompts, completion menu, brackets).
    overrides = {
        Token.Prompt: base0B,
        Token.PromptNum: '%s bold' % base0B,
        Token.OutPrompt: base08,
        Token.OutPromptNum: '%s bold' % base08,
        Token.Menu.Completions.Completion: 'bg:%s %s' % (base01, base04),
        Token.Menu.Completions.Completion.Current: 'bg:%s %s' % (base04, base01),
        Token.MatchingBracket.Other: 'bg:%s %s' % (base03, base00)
    }
}
| true | true |
1c3844f5ad8864ed597c03a7ca8fc9a33d3aea8e | 3,684 | py | Python | examples/resnet/train_fleet_static_amp.py | sandyhouse/FleetX | b3d089cdb0f388c12ad95494ee98053d5bfa450b | [
"Apache-2.0"
] | 170 | 2020-08-12T12:07:01.000Z | 2022-03-07T02:38:26.000Z | examples/resnet/train_fleet_static_amp.py | sandyhouse/FleetX | b3d089cdb0f388c12ad95494ee98053d5bfa450b | [
"Apache-2.0"
] | 195 | 2020-08-13T03:22:15.000Z | 2022-03-30T07:40:25.000Z | examples/resnet/train_fleet_static_amp.py | sandyhouse/FleetX | b3d089cdb0f388c12ad95494ee98053d5bfa450b | [
"Apache-2.0"
] | 67 | 2020-08-14T02:07:46.000Z | 2022-03-28T10:05:33.000Z | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import argparse
import ast
import paddle
from paddle.distributed import fleet
import resnet_static as resnet
import os
# Training hyperparameters.
base_lr = 0.1          # initial learning rate for SGD-momentum
momentum_rate = 0.9    # momentum coefficient
l2_decay = 1e-4        # L2 weight-decay coefficient
epoch = 10             # number of training epochs
batch_size = 32        # samples per batch
class_dim = 102        # number of classes (Paddle flowers dataset)
def optimizer_setting(parameter_list=None):
    """Build the SGD-momentum optimizer with L2 weight decay.

    :param parameter_list: optional parameters to optimize (None = all).
    :return: configured paddle.optimizer.Momentum instance.
    """
    optimizer = paddle.optimizer.Momentum(
        learning_rate=base_lr,
        momentum=momentum_rate,
        weight_decay=paddle.regularizer.L2Decay(l2_decay),
        parameters=parameter_list)
    return optimizer
def get_train_loader(feed_list, place):
    """Create a DataLoader over the Paddle `flowers` training set.

    :param feed_list: static-graph input vars to feed (image, label).
    :param place: device the loader's tensors are placed on.
    :return: iterable paddle.io.DataLoader yielding (image, label) batches.
    """
    def reader_decorator(reader):
        # Adapt the raw dataset reader to (CHW float32 image, int64 label).
        def __reader__():
            for item in reader():
                img = np.array(item[0]).astype('float32').reshape(3, 224, 224)
                label = np.array(item[1]).astype('int64').reshape(1)
                yield img, label

        return __reader__

    train_reader = paddle.batch(
        reader_decorator(paddle.dataset.flowers.train(use_xmap=True)),
        batch_size=batch_size,
        drop_last=True)
    train_loader = paddle.io.DataLoader.from_generator(
        capacity=32,
        use_double_buffer=True,
        feed_list=feed_list,
        iterable=True)
    train_loader.set_sample_list_generator(train_reader, place)
    return train_loader
def train_resnet():
    """Train ResNet-50 on the Paddle `flowers` dataset with fleet + AMP.

    Builds the static graph, configures a distributed strategy with
    automatic mixed precision, and runs the training loop, printing
    loss/top-1/top-5 every 5 batches.
    """
    paddle.enable_static()
    paddle.vision.set_image_backend('cv2')

    # Static-graph inputs.
    image = paddle.static.data(name="x", shape=[None, 3, 224, 224], dtype='float32')
    label = paddle.static.data(name="y", shape=[None, 1], dtype='int64')

    # Network, loss and metrics.
    model = resnet.ResNet(layers=50)
    out = model.net(input=image, class_dim=class_dim)
    avg_cost = paddle.nn.functional.cross_entropy(input=out, label=label)
    acc_top1 = paddle.metric.accuracy(input=out, label=label, k=1)
    acc_top5 = paddle.metric.accuracy(input=out, label=label, k=5)

    # Device selected by the fleet launcher via FLAGS_selected_gpus.
    place = paddle.CUDAPlace(int(os.environ.get('FLAGS_selected_gpus', 0)))
    train_loader = get_train_loader([image, label], place)

    # Distributed strategy with automatic mixed precision (AMP).
    strategy = fleet.DistributedStrategy()
    strategy.amp = True
    strategy.amp_configs = {
        "init_loss_scaling": 32768,
        "decr_every_n_nan_or_inf": 2,
        "incr_every_n_steps": 1000,
        "incr_ratio": 2.0,
        "use_dynamic_loss_scaling": True,
        "decr_ratio": 0.5,
        "custom_white_list": [],
        "custom_black_list": [],
    }
    fleet.init(is_collective=True, strategy=strategy)

    optimizer = optimizer_setting()
    optimizer = fleet.distributed_optimizer(optimizer)
    optimizer.minimize(avg_cost)

    exe = paddle.static.Executor(place)
    exe.run(paddle.static.default_startup_program())

    # NOTE: shadows the module-level `epoch` constant with the same value.
    # (Removed the unused `step = 0` local from the original.)
    epoch = 10
    for eop in range(epoch):
        for batch_id, data in enumerate(train_loader()):
            loss, acc1, acc5 = exe.run(paddle.static.default_main_program(), feed=data, fetch_list=[avg_cost.name, acc_top1.name, acc_top5.name])
            if batch_id % 5 == 0:
                print("[Epoch %d, batch %d] loss: %.5f, acc1: %.5f, acc5: %.5f" % (eop, batch_id, loss, acc1, acc5))
# Script entry point.
if __name__ == '__main__':
    train_resnet()
| 33.798165 | 158 | 0.678882 |
import numpy as np
import argparse
import ast
import paddle
from paddle.distributed import fleet
import resnet_static as resnet
import os
# Training hyperparameters.
base_lr = 0.1
momentum_rate = 0.9
l2_decay = 1e-4
epoch = 10
batch_size = 32
class_dim = 102
def optimizer_setting(parameter_list=None):
    """Build the SGD-momentum optimizer with L2 weight decay."""
    optimizer = paddle.optimizer.Momentum(
        learning_rate=base_lr,
        momentum=momentum_rate,
        weight_decay=paddle.regularizer.L2Decay(l2_decay),
        parameters=parameter_list)
    return optimizer
def get_train_loader(feed_list, place):
    """Create a DataLoader over the Paddle `flowers` training set."""
    def reader_decorator(reader):
        # Adapt the raw dataset reader to (CHW float32 image, int64 label).
        def __reader__():
            for item in reader():
                img = np.array(item[0]).astype('float32').reshape(3, 224, 224)
                label = np.array(item[1]).astype('int64').reshape(1)
                yield img, label
        return __reader__
    train_reader = paddle.batch(
        reader_decorator(paddle.dataset.flowers.train(use_xmap=True)),
        batch_size=batch_size,
        drop_last=True)
    train_loader = paddle.io.DataLoader.from_generator(
        capacity=32,
        use_double_buffer=True,
        feed_list=feed_list,
        iterable=True)
    train_loader.set_sample_list_generator(train_reader, place)
    return train_loader
def train_resnet():
    """Train ResNet-50 on `flowers` with fleet + automatic mixed precision."""
    paddle.enable_static()
    paddle.vision.set_image_backend('cv2')
    # Static-graph inputs.
    image = paddle.static.data(name="x", shape=[None, 3, 224, 224], dtype='float32')
    label= paddle.static.data(name="y", shape=[None, 1], dtype='int64')
    model = resnet.ResNet(layers=50)
    out = model.net(input=image, class_dim=class_dim)
    avg_cost = paddle.nn.functional.cross_entropy(input=out, label=label)
    acc_top1 = paddle.metric.accuracy(input=out, label=label, k=1)
    acc_top5 = paddle.metric.accuracy(input=out, label=label, k=5)
    # Device selected by the fleet launcher via FLAGS_selected_gpus.
    place = paddle.CUDAPlace(int(os.environ.get('FLAGS_selected_gpus', 0)))
    train_loader = get_train_loader([image, label], place)
    # Distributed strategy with AMP loss-scaling configuration.
    strategy = fleet.DistributedStrategy()
    strategy.amp = True
    strategy.amp_configs = {
        "init_loss_scaling": 32768,
        "decr_every_n_nan_or_inf": 2,
        "incr_every_n_steps": 1000,
        "incr_ratio": 2.0,
        "use_dynamic_loss_scaling": True,
        "decr_ratio": 0.5,
        "custom_white_list": [],
        "custom_black_list": [],
    }
    fleet.init(is_collective=True, strategy=strategy)
    optimizer = optimizer_setting()
    optimizer = fleet.distributed_optimizer(optimizer)
    optimizer.minimize(avg_cost)
    exe = paddle.static.Executor(place)
    exe.run(paddle.static.default_startup_program())
    epoch = 10  # shadows the module-level constant with the same value
    step = 0  # NOTE(review): unused local
    for eop in range(epoch):
        for batch_id, data in enumerate(train_loader()):
            loss, acc1, acc5 = exe.run(paddle.static.default_main_program(), feed=data, fetch_list=[avg_cost.name, acc_top1.name, acc_top5.name])
            if batch_id % 5 == 0:
                print("[Epoch %d, batch %d] loss: %.5f, acc1: %.5f, acc5: %.5f" % (eop, batch_id, loss, acc1, acc5))
# Script entry point.
if __name__ == '__main__':
    train_resnet()
| true | true |
1c38451fb424e2b6a391207b83c511a087bace8a | 6,177 | py | Python | cli/scaleout/details.py | ScilifelabDataCentre/stackn | 00a65a16ff271f04548b3ff475c72dacbfd916df | [
"Apache-2.0"
] | null | null | null | cli/scaleout/details.py | ScilifelabDataCentre/stackn | 00a65a16ff271f04548b3ff475c72dacbfd916df | [
"Apache-2.0"
] | null | null | null | cli/scaleout/details.py | ScilifelabDataCentre/stackn | 00a65a16ff271f04548b3ff475c72dacbfd916df | [
"Apache-2.0"
] | null | null | null | import platform
import psutil
import logging
import json
import os
import sys
from scaleout.cli.helpers import prompt
def get_system_details(info):
    """Populate *info* with basic host details and return it.

    :param info: dict to fill in place.
    :return: the filled dict, or None if collection failed (error is logged).
    """
    try:
        info['Platform'] = platform.system()
        info['Architecture'] = platform.machine()
        info['Processor'] = platform.processor()
        info['RAM'] = str(round(psutil.virtual_memory().total / (1024.0 ** 3))) + " GB"
        info['Python version'] = platform.python_version()
        # All values above are plain strings, so the original
        # json.dumps/json.loads round-trip was a no-op and has been removed.
        return info
    except Exception as e:
        print("Failed to retrieve details about your system.")
        logging.exception(e)
def get_cpu_details(info):
    """Populate *info* with CPU core counts and utilisation and return it.

    :param info: dict to fill in place.
    :return: the filled dict, or None if collection failed (error is logged).
    """
    try:
        info['Physical cores'] = psutil.cpu_count(logical=False)
        info['Total cores'] = psutil.cpu_count(logical=True)
        # cpu_percent(percpu=True, interval=1) blocks ~1s to sample usage.
        for i, percentage in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
            info[f'Core {i}'] = f'{percentage}%'
        info['Total CPU usage'] = f'{psutil.cpu_percent()}%'
        # All values above are JSON-plain (ints/strings), so the original
        # json.dumps/json.loads round-trip was a no-op and has been removed.
        return info
    except Exception as e:
        print("Failed to retrieve details about the CPU of your machine.")
        logging.exception(e)
# Function that pauses the run until the user either commits changed files in the repo, or tells the program to contnue training with uncommitted files
# ---------------- Question -------------------
# Should all files be committed before training or is it enough if the user commits some files in the repo?
# ---------------------------------------------
def commit_helper(repo, exit_message):
    """Interactively resolve uncommitted changes before training.

    Asks the user to either continue with a dirty repo or pause and commit.
    Exits the process (via *exit_message*) if the user aborts.

    :param repo: the git repository (currently unused by the body; kept
        for interface compatibility with callers).
    :param exit_message: format string used when aborting via sys.exit.
    :return: False if training continues with uncommitted files,
        True if the user reports the files were committed.
    """
    print('WARNING: Uncommitted files exist in the current Git repository. Training the model with uncommitted files '\
        + 'should be avoided for major experiments since this will negatively impact code versioning. To increase future ' \
        + 'reproducibility of your experiment, please consider committing all files before training the model.\n')
    valid = ["1", "2"]
    # Loop until the user picks one of the two options.
    while True:
        answer = input("What do you want to do? \n" \
            + "  1) Continue training the model without committing my files (Not recommended). \n"\
            + "  2) Put the training session on hold to commit my files (Highly recommended). \n"\
            + "Choose an option [1 or 2]: ")
        if answer in valid:
            break
        else:
            print("\nPlease respond with '1' or '2'. \n")
    if answer == "1":
        print("\nThe training session will continue with uncommitted files in the repo. This might affect the reproducibility of your experiment.")
        question = "Are you sure you want to continue?"
        confirmed = prompt(question)
        if confirmed:
            return False
        else:
            sys.exit(exit_message.format("commit your files"))
    else:
        # Empty input (plain Enter) means "files committed, continue".
        answer = input("\nA good choice! After you commit your files, press enter to continue training the model "\
            + "(or abort the current training session by pressing arbitrary key): ")
        if answer:
            sys.exit(exit_message.format("commit your files"))
        else:
            # NOTE(review): the code takes the user's word for it -- whether the
            # files were actually committed is not verified against the repo.
            print("Perfect, your files have been committed and the training session will continue.")
            return True
def get_git_details(code_version):
    """Return ``(repo, latest_commit)`` describing the current Git state.

    If the repo is dirty the user is prompted (via ``commit_helper``) to
    commit first. Returns None if GitPython is not installed; may call
    ``sys.exit`` if the user aborts.

    Fix vs. original: the combined ``except (InvalidGitRepositoryError,
    ValueError)`` branched on ``if git.InvalidGitRepositoryError:`` -- a class
    object, hence always truthy -- so the ValueError branch was dead and
    referenced an undefined name ``committed_files``. The two exceptions are
    now handled in separate clauses with the clearly intended behavior.
    """
    exit_message = "Aborting this training session. Please {} before running 'stackn train' again."
    try:
        import git
    except ImportError:
        print('Failed to import Git')
        return None
    try:
        current_repo = git.Repo(search_parent_directories=True)
        is_committed = True
        if current_repo.is_dirty():  # uncommitted files exist
            is_committed = commit_helper(current_repo, exit_message)
        latest_commit = current_repo.head.object.hexsha
        print("Code version {} will be tied to the Git commit hash '{}'.".format(code_version, latest_commit))
        if not is_committed:
            print("Since uncommitted files exist in the current repo, it will be noted in the training log that the code " \
                + "used to train the model in this run does not correspond to the recorded commit hash. " \
                + "This is done mainly for the purpose of appropriate code versioning and future reproducibility.")
    except git.InvalidGitRepositoryError:
        # Not inside a Git repository at all.
        latest_commit = "No recent Git commit to log"
        print('WARNING: Failed to extract Git repo. Check to see if you are currently working in a Git repository.')
        question = "Do you want to continue training the model anyways (not recommended)?"
        confirmed = prompt(question)
        if confirmed:
            current_repo = "No Git repository to log"
        else:
            sys.exit(exit_message.format('enter an active Git repo'))
    except ValueError:
        # Presumably raised when the repo has no commits yet (empty HEAD).
        latest_commit = "No recent Git commit to log"
        print("WARNING: Failed to extract latest Git commit hash. No commits seem to have been made yet and you have chosen not to commit them. " \
            + "The training session will continue.")
    return (current_repo, latest_commit)
def get_run_details(code_version):
    """Collect system, CPU and Git details for a training run.

    :param code_version: version label tied to the Git commit hash.
    :return: (system_details, cpu_details, git_details) tuple.
    """
    return (
        get_system_details({}),
        get_cpu_details({}),
        get_git_details(code_version),
    )
| 51.049587 | 151 | 0.646592 | import platform
import psutil
import logging
import json
import os
import sys
from scaleout.cli.helpers import prompt
def get_system_details(info):
    """Populate *info* with basic host details; returns a JSON round-tripped copy."""
    try:
        info['Platform'] = platform.system()
        info['Architecture'] = platform.machine()
        info['Processor'] = platform.processor()
        info['RAM'] = str(round(psutil.virtual_memory().total / (1024.0 **3))) + " GB"
        info['Python version'] = platform.python_version()
        # NOTE: dumps/loads round-trip is a no-op here (values are strings).
        json_prep = json.dumps(info)
        return json.loads(json_prep)
    except Exception as e:
        print("Failed to retrieve details about your system.")
        logging.exception(e)
def get_cpu_details(info):
    """Populate *info* with CPU core counts and utilisation; returns a copy."""
    try:
        info['Physical cores'] = psutil.cpu_count(logical=False)
        info['Total cores'] = psutil.cpu_count(logical=True)
        # cpu_percent(percpu=True, interval=1) blocks ~1s to sample usage.
        for i, percentage in enumerate(psutil.cpu_percent(percpu=True, interval=1)):
            info[f'Core {i}'] = f'{percentage}%'
        info['Total CPU usage'] = f'{psutil.cpu_percent()}%'
        json_prep = json.dumps(info)
        return json.loads(json_prep)
    except Exception as e:
        print("Failed to retrieve details about the CPU of your machine.")
        logging.exception(e)
def commit_helper(repo, exit_message):
    """Interactively resolve uncommitted changes before training.

    Returns False to continue with a dirty repo, True once the user reports
    the files were committed; may call sys.exit to abort.
    """
    print('WARNING: Uncommitted files exist in the current Git repository. Training the model with uncommitted files '\
        + 'should be avoided for major experiments since this will negatively impact code versioning. To increase future ' \
        + 'reproducibility of your experiment, please consider committing all files before training the model.\n')
    valid = ["1", "2"]
    # Loop until the user picks one of the two options.
    while True:
        answer = input("What do you want to do? \n" \
            + "  1) Continue training the model without committing my files (Not recommended). \n"\
            + "  2) Put the training session on hold to commit my files (Highly recommended). \n"\
            + "Choose an option [1 or 2]: ")
        if answer in valid:
            break
        else:
            print("\nPlease respond with '1' or '2'. \n")
    if answer == "1":
        print("\nThe training session will continue with uncommitted files in the repo. This might affect the reproducibility of your experiment.")
        question = "Are you sure you want to continue?"
        confirmed = prompt(question)
        if confirmed:
            return False
        else:
            sys.exit(exit_message.format("commit your files"))
    else:
        # Empty input (plain Enter) means "files committed, continue".
        answer = input("\nA good choice! After you commit your files, press enter to continue training the model "\
            + "(or abort the current training session by pressing arbitrary key): ")
        if answer:
            sys.exit(exit_message.format("commit your files"))
        else:
            print("Perfect, your files have been committed and the training session will continue.")
            return True
def get_git_details(code_version):
    """Return ``(repo, latest_commit)`` describing the current Git state.

    Fixes vs. original: (1) the truncated line ``rectories=True)`` is restored
    to ``current_repo = git.Repo(search_parent_directories=True)`` (matching
    the commented sibling copy of this function); (2) the always-truthy
    ``if git.InvalidGitRepositoryError:`` class-object test (which left the
    ValueError branch dead, referencing undefined ``committed_files``) is
    replaced by separate except clauses.
    """
    exit_message = "Aborting this training session. Please {} before running 'stackn train' again."
    try:
        import git
    except ImportError:
        print('Failed to import Git')
        return None
    try:
        current_repo = git.Repo(search_parent_directories=True)
        is_committed = True
        if current_repo.is_dirty():  # uncommitted files exist
            is_committed = commit_helper(current_repo, exit_message)
        latest_commit = current_repo.head.object.hexsha
        print("Code version {} will be tied to the Git commit hash '{}'.".format(code_version, latest_commit))
        if not is_committed:
            print("Since uncommitted files exist in the current repo, it will be noted in the training log that the code " \
                + "used to train the model in this run does not correspond to the recorded commit hash. " \
                + "This is done mainly for the purpose of appropriate code versioning and future reproducibility.")
    except git.InvalidGitRepositoryError:
        # Not inside a Git repository at all.
        latest_commit = "No recent Git commit to log"
        print('WARNING: Failed to extract Git repo. Check to see if you are currently working in a Git repository.')
        question = "Do you want to continue training the model anyways (not recommended)?"
        confirmed = prompt(question)
        if confirmed:
            current_repo = "No Git repository to log"
        else:
            sys.exit(exit_message.format('enter an active Git repo'))
    except ValueError:
        # Presumably raised when the repo has no commits yet (empty HEAD).
        latest_commit = "No recent Git commit to log"
        print("WARNING: Failed to extract latest Git commit hash. No commits seem to have been made yet and you have chosen not to commit them. " \
            + "The training session will continue.")
    return (current_repo, latest_commit)
def get_run_details(code_version):
    """Collect system, CPU and Git details for a training run."""
    system_details = get_system_details({})
    cpu_details = get_cpu_details({})
    git_details = get_git_details(code_version)
    return system_details, cpu_details, git_details
| true | true |
1c3845d71bd0808be9a45e1440e146be6fafa885 | 11,636 | py | Python | pychron/experiment/experimentor.py | ASUPychron/pychron | dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76 | [
"Apache-2.0"
] | 31 | 2016-03-07T02:38:17.000Z | 2022-02-14T18:23:43.000Z | pychron/experiment/experimentor.py | ASUPychron/pychron | dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76 | [
"Apache-2.0"
] | 1,626 | 2015-01-07T04:52:35.000Z | 2022-03-25T19:15:59.000Z | pychron/experiment/experimentor.py | UIllinoisHALPychron/pychron | f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc | [
"Apache-2.0"
] | 26 | 2015-05-23T00:10:06.000Z | 2022-03-07T16:51:57.000Z | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Instance, List, on_trait_change, Bool, Event
from pychron.dvc.dvc_irradiationable import DVCIrradiationable
from pychron.experiment.experiment_executor import ExperimentExecutor
from pychron.experiment.factory import ExperimentFactory
from pychron.experiment.queue.experiment_queue import ExperimentQueue
class Experimentor(DVCIrradiationable):
experiment_factory = Instance(ExperimentFactory)
experiment_queue = Instance(ExperimentQueue)
executor = Instance(ExperimentExecutor)
experiment_queues = List
# stats = Instance(StatsGroup, ())
mode = None
# unique_executor_db = False
save_enabled = Bool
# ===========================================================================
# permissions
# ===========================================================================
# max_allowable_runs = 10000
# can_edit_scripts = True
# _last_ver_time = None
# _ver_timeout = 10
# ===========================================================================
# task events
# ===========================================================================
activate_editor_event = Event
save_event = Event
    # NOTE: name is a long-standing typo of "prepare_destroy"; kept because
    # external callers use it. Tears down the datahubs held by the executor
    # and the run factory, guarding each attribute since either may be unset.
    def prepare_destory(self):
        if self.executor:
            if self.executor.datahub:
                self.executor.datahub.prepare_destroy()
        if self.experiment_factory:
            if self.experiment_factory.run_factory:
                if self.experiment_factory.run_factory.datahub:
                    self.experiment_factory.run_factory.datahub.prepare_destroy()
    def load(self):
        # Flag both factory sub-components to refresh from the database.
        self.experiment_factory.queue_factory.db_refresh_needed = True
        self.experiment_factory.run_factory.db_refresh_needed = True
        return True

    def reset_run_generator(self):
        # If a queue is executing, tell the executor its queue was modified.
        if self.executor.is_alive():
            self.debug("Queue modified. Reset run generator")
            # self.executor.queue_modified = True
            self.executor.set_queue_modified()

    def refresh_executable(self, qs=None):
        # Recompute executor.executable: all queues (or just the running one
        # while executing) must report is_executable().
        if qs is None:
            qs = self.experiment_queues
        if self.executor.is_alive():
            qs = (self.executor.experiment_queue,)
        self.executor.executable = all([ei.is_executable() for ei in qs])
        self.debug("setting executable {}".format(self.executor.executable))

    def update_queues(self):
        self._update_queues()

    def update_info(self):
        # Refresh stats/metadata; failures are reported, not raised.
        try:
            self._update()
        except BaseException as e:
            self.debug_exception()
            self.warning_dialog("Failed updating info: Error={}".format(e))
# ===============================================================================
# info update
# ===============================================================================
def _get_all_automated_runs(self, qs=None):
if qs is None:
qs = self.experiment_queues
return [ai for ei in qs for ai in ei.automated_runs if ai.executable]
    def _update(self, queues=None):
        """Recalculate stats, executability and analysis metadata for the
        updateable queues, then flag their tables for refresh."""
        self.debug("update runs")
        if queues is None:
            queues = self.experiment_queues

        # Only queues that report themselves updateable participate.
        queues = [qi for qi in queues if qi.is_updateable()]
        if not queues:
            return

        self.debug("executor executable {}".format(self.executor.executable))
        self.debug("updating stats, ")
        self.executor.stats.experiment_queues = queues
        self.executor.stats.calculate()
        self.refresh_executable(queues)

        self._set_analysis_metadata()

        self.debug("info updated")
        for qi in queues:
            qi.refresh_table_needed = True
    def _set_analysis_metadata(self):
        """Look up identifier info in the database for every executable run
        and apply it to the run's traits, caching one lookup per labnumber."""
        cache = dict()

        db = self.get_database()
        aruns = self._get_all_automated_runs()
        with db.session_ctx():
            for ai in aruns:
                if ai.skip:
                    continue
                ln = ai.labnumber
                # "dg" (degas) runs carry no identifier metadata.
                if ln == "dg":
                    continue

                # One DB hit per labnumber; misses are cached as errors.
                if ln not in cache:
                    info = db.get_identifier_info(ln)
                    self.debug("Info for {}={}".format(ln, info))
                    if not info:
                        cache[ln] = dict(identifier_error=True)
                    else:
                        info["identifier_error"] = False
                        cache[ln] = info

                ai.trait_set(**cache[ln])
    def execute_queues(self, queues):
        """Hand *queues* to the executor and start execution; the first queue
        becomes the active one. Returns the executor's result."""
        names = ",".join([e.name for e in queues])
        self.debug("queues: n={}, names={}".format(len(queues), names))

        self.executor.trait_set(experiment_queues=queues, experiment_queue=queues[0])

        return self.executor.execute()

    def verify_database_connection(self, inform=True):
        """Force-reconnect the database; True on success.
        NOTE(review): the elif pairs with `db is not None`, so when a database
        exists but connect() fails, no warning is shown -- confirm intended."""
        db = self.get_database()
        if db is not None:
            if db.connect(force=True):
                return True
        elif inform:
            self.warning_dialog("No Database available")
    def sync_queue(self, queue):
        """Trim already-executed runs from the front of *queue* by probing the
        database for each run until the first one with no recorded analysis."""
        ms = queue.mass_spectrometer
        ed = queue.extract_device
        db = self.get_database()
        with db.session_ctx():
            next_pos = None
            for i, ai in enumerate(queue.automated_runs):
                if ai.skip or ai.is_special():
                    continue
                # Attributes used to match this run against a stored analysis.
                kw = {
                    "identifier": ai.identifier,
                    "position": ai.position,
                    "mass_spectrometer": ms.lower(),
                    "extract_device": ed,
                }
                if ai.is_step_heat():
                    kw["aliquot"] = ai.aliquot
                    kw["extract_value"] = ai.extract_value

                self.debug("checking {}/{}. attr={}".format(i, ai.runid, kw))
                aa = db.get_analysis_by_attr(**kw)
                if aa is None:
                    self.debug("----- not found")
                    # If this is the run we predicted to be next, back up one
                    # so the slice below starts at it.
                    if next_pos == ai:
                        i -= 1
                        break
                    elif not self.confirmation_dialog(
                        "Found analyses up to {}. "
                        "position={}, extract={}. "
                        "Continue searching?".format(
                            ai.runid, ai.extract_value, ai.position
                        )
                    ):
                        break

                next_pos = queue.automated_runs[i + 1]

            # i == 0 means nothing from this queue has been run yet.
            if i:
                if i == len(queue.automated_runs) - 1:
                    self.information_dialog(
                        "All Analyses from this experiment have been run"
                    )
                else:
                    # Drop the already-run prefix.
                    queue.automated_runs = queue.automated_runs[i:]
            else:
                self.information_dialog(
                    "No Analyses from this experiment have been run"
                )
# ===============================================================================
# handlers
# ===============================================================================
    # Sync the factory with the newly active queue; editing is only enabled
    # while a queue is selected.
    def _experiment_queue_changed(self, eq):
        if eq:
            self.experiment_factory.queue = eq
            self.experiment_factory.sync_queue_meta()
            self.experiment_factory.edit_enabled = True
        else:
            self.experiment_factory.edit_enabled = False

    @on_trait_change("executor:experiment_queue")
    def _activate_editor(self, eq):
        # Ask the task layer (by object id) to bring this queue's editor forward.
        self.activate_editor_event = id(eq)

    @on_trait_change("experiment_queues[]")
    def _update_queues(self):
        # Keep the stats object in sync with the open queues.
        qs = self.experiment_queues
        self.executor.stats.experiment_queues = qs

    @on_trait_change("experiment_factory:run_factory:changed")
    def _queue_dirty(self):
        self.experiment_queue.changed = True

    @on_trait_change("experiment_queue:dclicked")
    def _dclicked_changed(self, new):
        # Double-click puts the run factory into edit mode on the selection.
        self.experiment_factory.run_factory.edit_mode = True
        self._set_factory_runs(self.experiment_queue.selected)

    @on_trait_change("experiment_factory:run_factory:update_info_needed")
    def _refresh3(self):
        self.debug("update info needed fired")
        self.update_info()

    @on_trait_change("executor:queue_modified")
    def _refresh5(self, new):
        if new:
            self.debug("queue modified fired")
            self.update_info()

    @on_trait_change("experiment_factory:run_factory:refresh_table_needed")
    def _refresh4(self):
        for qi in self.experiment_queues:
            qi.refresh_table_needed = True

    @on_trait_change("experiment_factory:save_button")
    def _save_update(self):
        self.save_event = True
        self.update_info()

    @on_trait_change("experiment_queue:refresh_info_needed")
    def _handle_refresh(self):
        self.update_info()

    @on_trait_change("experiment_queue:selected")
    def _selected_changed(self, new):
        ef = self.experiment_factory
        rf = ef.run_factory
        rf.edit_mode = False
        if new:
            self._set_factory_runs(new)

            # Recalculate time-to-run stats at the last selected run.
            # if self.executor.is_alive():
            a = new[-1]
            if not a.skip:
                self.executor.stats.calculate_at(a, at_times=self.executor.is_alive())
                # self.stats.calculate()

    @on_trait_change("experiment_factory:queue_factory:delay_between_analyses")
    def handle_delay_between_analyses(self, new):
        # Propagate the delay to a queue that is currently executing.
        if self.executor.is_alive():
            self.executor.experiment_queue.delay_between_analyses = new
    def _set_factory_runs(self, new):
        # Push the selected runs into the run factory, suppressing its
        # update events while the selection is applied.
        ef = self.experiment_factory
        rf = ef.run_factory
        rf.suppress_update = True
        rf.set_selected_runs(new)
        rf.suppress_update = False

    def _executor_factory(self):
        # Build an executor bound to this experimentor's mode/application.
        e = ExperimentExecutor(mode=self.mode, application=self.application)
        e.bind_preferences()
        return e
# ===============================================================================
# defaults
# ===============================================================================
    # Traits default initializers (called lazily on first access).
    def _executor_default(self):
        return self._executor_factory()

    def _experiment_factory_default(self):
        # Default the mass spectrometer name from the registered spectrometer
        # manager service, falling back to the generic "Spectrometer".
        dms = "Spectrometer"
        if self.application:
            p2 = (
                "pychron.spectrometer.base_spectrometer_manager.BaseSpectrometerManager"
            )
            spec = self.application.get_service(p2)
            if spec:
                dms = spec.name.capitalize()
        e = ExperimentFactory(
            application=self.application, dvc=self.dvc, default_mass_spectrometer=dms
        )
        return e
# ============= EOF =============================================
| 34.942943 | 88 | 0.544173 |
from __future__ import absolute_import
from traits.api import Instance, List, on_trait_change, Bool, Event
from pychron.dvc.dvc_irradiationable import DVCIrradiationable
from pychron.experiment.experiment_executor import ExperimentExecutor
from pychron.experiment.factory import ExperimentFactory
from pychron.experiment.queue.experiment_queue import ExperimentQueue
class Experimentor(DVCIrradiationable):
experiment_factory = Instance(ExperimentFactory)
experiment_queue = Instance(ExperimentQueue)
executor = Instance(ExperimentExecutor)
experiment_queues = List
mode = None
save_enabled = Bool
activate_editor_event = Event
save_event = Event
def prepare_destory(self):
if self.executor:
if self.executor.datahub:
self.executor.datahub.prepare_destroy()
if self.experiment_factory:
if self.experiment_factory.run_factory:
if self.experiment_factory.run_factory.datahub:
self.experiment_factory.run_factory.datahub.prepare_destroy()
def load(self):
self.experiment_factory.queue_factory.db_refresh_needed = True
self.experiment_factory.run_factory.db_refresh_needed = True
return True
def reset_run_generator(self):
if self.executor.is_alive():
self.debug("Queue modified. Reset run generator")
self.executor.set_queue_modified()
def refresh_executable(self, qs=None):
if qs is None:
qs = self.experiment_queues
if self.executor.is_alive():
qs = (self.executor.experiment_queue,)
self.executor.executable = all([ei.is_executable() for ei in qs])
self.debug("setting executable {}".format(self.executor.executable))
def update_queues(self):
self._update_queues()
def update_info(self):
try:
self._update()
except BaseException as e:
self.debug_exception()
self.warning_dialog("Failed updating info: Error={}".format(e))
def _get_all_automated_runs(self, qs=None):
if qs is None:
qs = self.experiment_queues
return [ai for ei in qs for ai in ei.automated_runs if ai.executable]
def _update(self, queues=None):
self.debug("update runs")
if queues is None:
queues = self.experiment_queues
queues = [qi for qi in queues if qi.is_updateable()]
if not queues:
return
self.debug("executor executable {}".format(self.executor.executable))
self.debug("updating stats, ")
self.executor.stats.experiment_queues = queues
self.executor.stats.calculate()
self.refresh_executable(queues)
self._set_analysis_metadata()
self.debug("info updated")
for qi in queues:
qi.refresh_table_needed = True
def _set_analysis_metadata(self):
cache = dict()
db = self.get_database()
aruns = self._get_all_automated_runs()
with db.session_ctx():
for ai in aruns:
if ai.skip:
continue
ln = ai.labnumber
if ln == "dg":
continue
if ln not in cache:
info = db.get_identifier_info(ln)
self.debug("Info for {}={}".format(ln, info))
if not info:
cache[ln] = dict(identifier_error=True)
else:
info["identifier_error"] = False
cache[ln] = info
ai.trait_set(**cache[ln])
def execute_queues(self, queues):
names = ",".join([e.name for e in queues])
self.debug("queues: n={}, names={}".format(len(queues), names))
self.executor.trait_set(experiment_queues=queues, experiment_queue=queues[0])
return self.executor.execute()
def verify_database_connection(self, inform=True):
db = self.get_database()
if db is not None:
if db.connect(force=True):
return True
elif inform:
self.warning_dialog("No Database available")
def sync_queue(self, queue):
ms = queue.mass_spectrometer
ed = queue.extract_device
db = self.get_database()
with db.session_ctx():
next_pos = None
for i, ai in enumerate(queue.automated_runs):
if ai.skip or ai.is_special():
continue
kw = {
"identifier": ai.identifier,
"position": ai.position,
"mass_spectrometer": ms.lower(),
"extract_device": ed,
}
if ai.is_step_heat():
kw["aliquot"] = ai.aliquot
kw["extract_value"] = ai.extract_value
self.debug("checking {}/{}. attr={}".format(i, ai.runid, kw))
aa = db.get_analysis_by_attr(**kw)
if aa is None:
self.debug("----- not found")
if next_pos == ai:
i -= 1
break
elif not self.confirmation_dialog(
"Found analyses up to {}. "
"position={}, extract={}. "
"Continue searching?".format(
ai.runid, ai.extract_value, ai.position
)
):
break
next_pos = queue.automated_runs[i + 1]
if i:
if i == len(queue.automated_runs) - 1:
self.information_dialog(
"All Analyses from this experiment have been run"
)
else:
queue.automated_runs = queue.automated_runs[i:]
else:
self.information_dialog(
"No Analyses from this experiment have been run"
)
def _experiment_queue_changed(self, eq):
if eq:
self.experiment_factory.queue = eq
self.experiment_factory.sync_queue_meta()
self.experiment_factory.edit_enabled = True
else:
self.experiment_factory.edit_enabled = False
@on_trait_change("executor:experiment_queue")
def _activate_editor(self, eq):
self.activate_editor_event = id(eq)
@on_trait_change("experiment_queues[]")
def _update_queues(self):
qs = self.experiment_queues
self.executor.stats.experiment_queues = qs
@on_trait_change("experiment_factory:run_factory:changed")
def _queue_dirty(self):
self.experiment_queue.changed = True
@on_trait_change("experiment_queue:dclicked")
def _dclicked_changed(self, new):
self.experiment_factory.run_factory.edit_mode = True
self._set_factory_runs(self.experiment_queue.selected)
@on_trait_change("experiment_factory:run_factory:update_info_needed")
def _refresh3(self):
self.debug("update info needed fired")
self.update_info()
@on_trait_change("executor:queue_modified")
def _refresh5(self, new):
if new:
self.debug("queue modified fired")
self.update_info()
@on_trait_change("experiment_factory:run_factory:refresh_table_needed")
def _refresh4(self):
for qi in self.experiment_queues:
qi.refresh_table_needed = True
@on_trait_change("experiment_factory:save_button")
def _save_update(self):
self.save_event = True
self.update_info()
@on_trait_change("experiment_queue:refresh_info_needed")
def _handle_refresh(self):
self.update_info()
@on_trait_change("experiment_queue:selected")
def _selected_changed(self, new):
ef = self.experiment_factory
rf = ef.run_factory
rf.edit_mode = False
if new:
self._set_factory_runs(new)
a = new[-1]
if not a.skip:
self.executor.stats.calculate_at(a, at_times=self.executor.is_alive())
@on_trait_change("experiment_factory:queue_factory:delay_between_analyses")
def handle_delay_between_analyses(self, new):
if self.executor.is_alive():
self.executor.experiment_queue.delay_between_analyses = new
def _set_factory_runs(self, new):
ef = self.experiment_factory
rf = ef.run_factory
rf.suppress_update = True
rf.set_selected_runs(new)
rf.suppress_update = False
def _executor_factory(self):
e = ExperimentExecutor(mode=self.mode, application=self.application)
e.bind_preferences()
return e
def _executor_default(self):
return self._executor_factory()
def _experiment_factory_default(self):
dms = "Spectrometer"
if self.application:
p2 = (
"pychron.spectrometer.base_spectrometer_manager.BaseSpectrometerManager"
)
spec = self.application.get_service(p2)
if spec:
dms = spec.name.capitalize()
e = ExperimentFactory(
application=self.application, dvc=self.dvc, default_mass_spectrometer=dms
)
return e
| true | true |
1c38463f66647da1607e6d5fc5115661bd36b39a | 5,398 | py | Python | backend/api/viewsets/model_year_report_consumer_sales.py | NavpreetGrewal/zeva | a2fc9241d471bb6a2099bcf48b9589ac9391cc64 | [
"Apache-2.0"
] | null | null | null | backend/api/viewsets/model_year_report_consumer_sales.py | NavpreetGrewal/zeva | a2fc9241d471bb6a2099bcf48b9589ac9391cc64 | [
"Apache-2.0"
] | null | null | null | backend/api/viewsets/model_year_report_consumer_sales.py | NavpreetGrewal/zeva | a2fc9241d471bb6a2099bcf48b9589ac9391cc64 | [
"Apache-2.0"
] | null | null | null | from rest_framework import mixins, viewsets
from rest_framework.response import Response
from django.db.models import Q
from django.shortcuts import get_object_or_404
from api.models.model_year_report_vehicle import ModelYearReportVehicle
from api.models.model_year_report import ModelYearReport
from api.models.model_year_report_confirmation import \
ModelYearReportConfirmation
from api.models.model_year_report_history import ModelYearReportHistory
from api.models.model_year import ModelYear
from api.models.model_year_report_ldv_sales import \
ModelYearReportLDVSales
from api.models.model_year_report_statuses import ModelYearReportStatuses
from api.permissions.model_year_report import ModelYearReportPermissions
from api.serializers.model_year_report_history import \
ModelYearReportHistorySerializer
from api.serializers.model_year_report_vehicle import \
ModelYearReportVehicleSerializer, ModelYearReportVehicleSaveSerializer
from api.serializers.model_year_report import ModelYearReportSerializer
from api.serializers.vehicle import ModelYearSerializer
from api.services.vehicle import vehicles_sales
from api.serializers.vehicle import VehicleSalesSerializer
class ModelYearReportConsumerSalesViewSet(mixins.ListModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
permission_classes = (ModelYearReportPermissions,)
http_method_names = ['get', 'post', 'put', 'patch']
queryset = ModelYearReport.objects.all()
serializer_classes = {
'default': ModelYearReportSerializer,
'create': ModelYearReportVehicleSaveSerializer,
}
def get_serializer_class(self):
if self.action in list(self.serializer_classes.keys()):
return self.serializer_classes[self.action]
return self.serializer_classes['default']
def create(self, request, *args, **kwargs):
vehicles = request.data.get('data')
model_year_report_id = request.data.get('model_year_report_id')
confirmations = request.data.get('confirmation')
report = ModelYearReport.objects.get(id=model_year_report_id)
"""
Save/Update vehicle information
"""
vehicles_delete = ModelYearReportVehicle.objects.filter(
model_year_report_id=model_year_report_id
)
vehicles_delete.delete()
for vehicle in vehicles:
serializer = ModelYearReportVehicleSaveSerializer(
data=vehicle,
context={'request': request}
)
serializer.is_valid(raise_exception=True)
model_year_report_vehicle = serializer.save()
"""
Save/Update confirmation
"""
for confirmation in confirmations:
confirmation_delete = ModelYearReportConfirmation.objects.filter(
signing_authority_assertion_id=confirmation).filter(
model_year_report=report)
confirmation_delete.delete()
consumer_sales_confirmation = ModelYearReportConfirmation.objects.create(
create_user=request.user.username,
model_year_report=report,
has_accepted=True,
title=request.user.title,
signing_authority_assertion_id=confirmation
)
consumer_sales_confirmation.save()
ModelYearReportHistory.objects.create(
model_year_report_id=model_year_report_id,
validation_status=ModelYearReportStatuses.DRAFT,
update_user=request.user.username,
create_user=request.user.username,
)
return Response(
{"status": "saved"}
)
def retrieve(self, request, pk):
vehicles = None
queryset = self.get_queryset()
report = get_object_or_404(queryset, pk=pk)
model_year = ModelYearSerializer(report.model_year)
organization = request.user.organization.id
summary_param = request.GET.get('summary', None)
summary = True if summary_param == "true" else None
confirmation = ModelYearReportConfirmation.objects.filter(
model_year_report_id=pk,
signing_authority_assertion__module="consumer_sales"
).values_list(
'signing_authority_assertion_id', flat=True
).distinct()
if not confirmation and not summary:
vehicle = vehicles_sales(model_year, organization)
vehicles_serializer = VehicleSalesSerializer(vehicle, many=True)
else:
vehicle = ModelYearReportVehicle.objects.filter(
model_year_report_id=report.id)
vehicles_serializer = ModelYearReportVehicleSerializer(
vehicle, many=True)
vehicles = vehicles_serializer.data
history_list = ModelYearReportHistory.objects.filter(
model_year_report_id=pk
)
history = ModelYearReportHistorySerializer(history_list, many=True)
return Response({
'vehicle_list': vehicles,
'model_year_report_history': history.data,
'confirmations': confirmation,
'organization_name': request.user.organization.name,
'validation_status': report.validation_status.value,
})
| 38.014085 | 85 | 0.686365 | from rest_framework import mixins, viewsets
from rest_framework.response import Response
from django.db.models import Q
from django.shortcuts import get_object_or_404
from api.models.model_year_report_vehicle import ModelYearReportVehicle
from api.models.model_year_report import ModelYearReport
from api.models.model_year_report_confirmation import \
ModelYearReportConfirmation
from api.models.model_year_report_history import ModelYearReportHistory
from api.models.model_year import ModelYear
from api.models.model_year_report_ldv_sales import \
ModelYearReportLDVSales
from api.models.model_year_report_statuses import ModelYearReportStatuses
from api.permissions.model_year_report import ModelYearReportPermissions
from api.serializers.model_year_report_history import \
ModelYearReportHistorySerializer
from api.serializers.model_year_report_vehicle import \
ModelYearReportVehicleSerializer, ModelYearReportVehicleSaveSerializer
from api.serializers.model_year_report import ModelYearReportSerializer
from api.serializers.vehicle import ModelYearSerializer
from api.services.vehicle import vehicles_sales
from api.serializers.vehicle import VehicleSalesSerializer
class ModelYearReportConsumerSalesViewSet(mixins.ListModelMixin,
mixins.CreateModelMixin,
viewsets.GenericViewSet):
permission_classes = (ModelYearReportPermissions,)
http_method_names = ['get', 'post', 'put', 'patch']
queryset = ModelYearReport.objects.all()
serializer_classes = {
'default': ModelYearReportSerializer,
'create': ModelYearReportVehicleSaveSerializer,
}
def get_serializer_class(self):
if self.action in list(self.serializer_classes.keys()):
return self.serializer_classes[self.action]
return self.serializer_classes['default']
def create(self, request, *args, **kwargs):
vehicles = request.data.get('data')
model_year_report_id = request.data.get('model_year_report_id')
confirmations = request.data.get('confirmation')
report = ModelYearReport.objects.get(id=model_year_report_id)
vehicles_delete = ModelYearReportVehicle.objects.filter(
model_year_report_id=model_year_report_id
)
vehicles_delete.delete()
for vehicle in vehicles:
serializer = ModelYearReportVehicleSaveSerializer(
data=vehicle,
context={'request': request}
)
serializer.is_valid(raise_exception=True)
model_year_report_vehicle = serializer.save()
for confirmation in confirmations:
confirmation_delete = ModelYearReportConfirmation.objects.filter(
signing_authority_assertion_id=confirmation).filter(
model_year_report=report)
confirmation_delete.delete()
consumer_sales_confirmation = ModelYearReportConfirmation.objects.create(
create_user=request.user.username,
model_year_report=report,
has_accepted=True,
title=request.user.title,
signing_authority_assertion_id=confirmation
)
consumer_sales_confirmation.save()
ModelYearReportHistory.objects.create(
model_year_report_id=model_year_report_id,
validation_status=ModelYearReportStatuses.DRAFT,
update_user=request.user.username,
create_user=request.user.username,
)
return Response(
{"status": "saved"}
)
def retrieve(self, request, pk):
vehicles = None
queryset = self.get_queryset()
report = get_object_or_404(queryset, pk=pk)
model_year = ModelYearSerializer(report.model_year)
organization = request.user.organization.id
summary_param = request.GET.get('summary', None)
summary = True if summary_param == "true" else None
confirmation = ModelYearReportConfirmation.objects.filter(
model_year_report_id=pk,
signing_authority_assertion__module="consumer_sales"
).values_list(
'signing_authority_assertion_id', flat=True
).distinct()
if not confirmation and not summary:
vehicle = vehicles_sales(model_year, organization)
vehicles_serializer = VehicleSalesSerializer(vehicle, many=True)
else:
vehicle = ModelYearReportVehicle.objects.filter(
model_year_report_id=report.id)
vehicles_serializer = ModelYearReportVehicleSerializer(
vehicle, many=True)
vehicles = vehicles_serializer.data
history_list = ModelYearReportHistory.objects.filter(
model_year_report_id=pk
)
history = ModelYearReportHistorySerializer(history_list, many=True)
return Response({
'vehicle_list': vehicles,
'model_year_report_history': history.data,
'confirmations': confirmation,
'organization_name': request.user.organization.name,
'validation_status': report.validation_status.value,
})
| true | true |
1c38486b5a0bdf40a741bd9dbc89b9eb9f489f78 | 1,828 | py | Python | modules/exploits/example.py | BA7JCM/metasploit-framework | a977d485084aefda76aeac2b15a8f5a4607938fc | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 2 | 2022-02-19T07:46:56.000Z | 2022-02-22T02:42:09.000Z | modules/exploits/example.py | BA7JCM/metasploit-framework | a977d485084aefda76aeac2b15a8f5a4607938fc | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 5 | 2020-10-08T15:04:39.000Z | 2020-10-08T15:04:46.000Z | modules/exploits/example.py | BA7JCM/metasploit-framework | a977d485084aefda76aeac2b15a8f5a4607938fc | [
"BSD-2-Clause",
"BSD-3-Clause"
] | 1 | 2022-03-01T05:18:40.000Z | 2022-03-01T05:18:40.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# standard modules
import logging
# extra modules
dependencies_missing = False
try:
import requests
except ImportError:
dependencies_missing = True
from metasploit import module
metadata = {
'name': 'Python Module Example',
'description': '''
Python communication with msfconsole.
''',
'authors': [
'Jacob Robles'
],
'date': '2018-03-22',
'license': 'MSF_LICENSE',
'references': [
{'type': 'url', 'ref': 'https://www.rapid7.com/blog/post/2017/12/28/regifting-python-in-metasploit/'},
{'type': 'aka', 'ref': 'Coldstone'}
],
'type': 'remote_exploit_cmd_stager',
'targets': [
{'platform':'linux', 'arch': 'x86'}
],
'payload': {
'command_stager_flavor': 'curl',
},
'options': {
'targeturi': {'type': 'string', 'description': 'The base path', 'required': True, 'default': '/'},
'rhost': {'type': 'address', 'description': 'Target address', 'required': True, 'default': None},
'command': {'type': 'string', 'description': 'The command to execute via the q GET parameter', 'required': True}
}
}
def run(args):
module.LogHandler.setup(msg_prefix='{} - '.format(args['rhost']))
if dependencies_missing:
logging.error('Module dependency (requests) is missing, cannot continue')
return
# Your code here
try:
# args['command'] is where the command stager command lives
r = requests.get('https://{}/{}/?q={}'.format(args['rhost'], args['targeturi'], args['command']), verify=False)
except requests.exceptions.RequestException as e:
logging.error('{}'.format(e))
return
logging.info('{}...'.format(r.text[0:50]))
if __name__ == '__main__':
module.run(metadata, run)
| 28.123077 | 120 | 0.599562 |
import logging
dependencies_missing = False
try:
import requests
except ImportError:
dependencies_missing = True
from metasploit import module
metadata = {
'name': 'Python Module Example',
'description': '''
Python communication with msfconsole.
''',
'authors': [
'Jacob Robles'
],
'date': '2018-03-22',
'license': 'MSF_LICENSE',
'references': [
{'type': 'url', 'ref': 'https://www.rapid7.com/blog/post/2017/12/28/regifting-python-in-metasploit/'},
{'type': 'aka', 'ref': 'Coldstone'}
],
'type': 'remote_exploit_cmd_stager',
'targets': [
{'platform':'linux', 'arch': 'x86'}
],
'payload': {
'command_stager_flavor': 'curl',
},
'options': {
'targeturi': {'type': 'string', 'description': 'The base path', 'required': True, 'default': '/'},
'rhost': {'type': 'address', 'description': 'Target address', 'required': True, 'default': None},
'command': {'type': 'string', 'description': 'The command to execute via the q GET parameter', 'required': True}
}
}
def run(args):
module.LogHandler.setup(msg_prefix='{} - '.format(args['rhost']))
if dependencies_missing:
logging.error('Module dependency (requests) is missing, cannot continue')
return
try:
r = requests.get('https://{}/{}/?q={}'.format(args['rhost'], args['targeturi'], args['command']), verify=False)
except requests.exceptions.RequestException as e:
logging.error('{}'.format(e))
return
logging.info('{}...'.format(r.text[0:50]))
if __name__ == '__main__':
module.run(metadata, run)
| true | true |
1c384892c1b89f4a0ea6806632905115f06fdd7f | 1,586 | py | Python | var/spack/repos/builtin/packages/sqlcipher/package.py | MiddelkoopT/spack | 4d94c4c4600f42a7a3bb3d06ec879140bc259304 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2021-02-08T15:05:27.000Z | 2021-02-08T15:05:27.000Z | var/spack/repos/builtin/packages/sqlcipher/package.py | MiddelkoopT/spack | 4d94c4c4600f42a7a3bb3d06ec879140bc259304 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/sqlcipher/package.py | MiddelkoopT/spack | 4d94c4c4600f42a7a3bb3d06ec879140bc259304 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2022-01-18T23:39:24.000Z | 2022-01-18T23:39:24.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
class Sqlcipher(AutotoolsPackage):
"""SQLCipher is an SQLite extension that provides 256 bit AES encryption
of database files.
"""
homepage = "https://www.zetetic.net/sqlcipher/"
url = "https://github.com/sqlcipher/sqlcipher/archive/v4.4.1.tar.gz"
git = "https://github.com/sqlcipher/sqlcipher.git"
maintainers = ['rmsds']
version('4.4.1', sha256='a36ed7c879a5e9af1054942201c75fc56f1db22e46bf6c2bbae3975dfeb6782d')
version('4.4.0', sha256='0924b2ae1079717954498bda78a30de20ce2a6083076b16214a711567821d148')
version('4.3.0', sha256='fccb37e440ada898902b294d02cde7af9e8706b185d77ed9f6f4d5b18b4c305f')
version('4.2.0', sha256='105c1b813f848da038c03647a8bfc9d42fb46865e6aaf4edfd46ff3b18cdccfc')
version('4.1.0', sha256='65144ca3ba4c0f9cd4bae8c20bb42f2b84424bf29d1ebcf04c44a728903b1faa')
version('4.0.1', sha256='2f803017378c7479cb791be59b7bad8392a15acddbcc094e4433581fe421f4ca')
version('4.0.0', sha256='c8f5fc6d800aae6107bf23900144804db5510c2676c93fbb269e4a0700837d68')
version('3.4.2', sha256='69897a5167f34e8a84c7069f1b283aba88cdfa8ec183165c4a5da2c816cfaadb')
depends_on('openssl')
depends_on('tcl', type=['build', ])
depends_on('zlib')
def configure_args(self):
args = []
args.append('--enable-tempstore=yes')
args.append('CFLAGS=-DSQLITE_HAS_CODEC')
return args
| 46.647059 | 95 | 0.748424 |
class Sqlcipher(AutotoolsPackage):
homepage = "https://www.zetetic.net/sqlcipher/"
url = "https://github.com/sqlcipher/sqlcipher/archive/v4.4.1.tar.gz"
git = "https://github.com/sqlcipher/sqlcipher.git"
maintainers = ['rmsds']
version('4.4.1', sha256='a36ed7c879a5e9af1054942201c75fc56f1db22e46bf6c2bbae3975dfeb6782d')
version('4.4.0', sha256='0924b2ae1079717954498bda78a30de20ce2a6083076b16214a711567821d148')
version('4.3.0', sha256='fccb37e440ada898902b294d02cde7af9e8706b185d77ed9f6f4d5b18b4c305f')
version('4.2.0', sha256='105c1b813f848da038c03647a8bfc9d42fb46865e6aaf4edfd46ff3b18cdccfc')
version('4.1.0', sha256='65144ca3ba4c0f9cd4bae8c20bb42f2b84424bf29d1ebcf04c44a728903b1faa')
version('4.0.1', sha256='2f803017378c7479cb791be59b7bad8392a15acddbcc094e4433581fe421f4ca')
version('4.0.0', sha256='c8f5fc6d800aae6107bf23900144804db5510c2676c93fbb269e4a0700837d68')
version('3.4.2', sha256='69897a5167f34e8a84c7069f1b283aba88cdfa8ec183165c4a5da2c816cfaadb')
depends_on('openssl')
depends_on('tcl', type=['build', ])
depends_on('zlib')
def configure_args(self):
args = []
args.append('--enable-tempstore=yes')
args.append('CFLAGS=-DSQLITE_HAS_CODEC')
return args
| true | true |
1c3849e06afbbb9b21ae5df801c2962db406e6ba | 91,397 | py | Python | tests/test_bert_ner.py | bond005/elmo_ner | c6135cfca5d7bf817a22c8c8631e7f81f6f05f94 | [
"Apache-2.0"
] | 80 | 2019-03-21T13:04:32.000Z | 2021-09-27T16:53:34.000Z | tests/test_bert_ner.py | bond005/elmo_ner | c6135cfca5d7bf817a22c8c8631e7f81f6f05f94 | [
"Apache-2.0"
] | 7 | 2019-06-06T13:49:54.000Z | 2022-02-10T01:05:18.000Z | tests/test_bert_ner.py | bond005/elmo_ner | c6135cfca5d7bf817a22c8c8631e7f81f6f05f94 | [
"Apache-2.0"
] | 16 | 2019-03-20T06:54:40.000Z | 2021-09-23T17:40:24.000Z | import copy
import gc
import os
import pickle
import re
import sys
import tempfile
import unittest
import numpy as np
from sklearn.exceptions import NotFittedError
from spacy_udpipe.language import UDPipeLanguage
try:
from deep_ner.bert_ner import BERT_NER
from deep_ner.utils import load_dataset_from_json, set_total_seed
from deep_ner.quality import calculate_prediction_quality
from deep_ner.udpipe_data import UNIVERSAL_DEPENDENCIES, UNIVERSAL_POS_TAGS
except:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from deep_ner.bert_ner import BERT_NER
from deep_ner.utils import load_dataset_from_json, set_total_seed
from deep_ner.quality import calculate_prediction_quality
from deep_ner.udpipe_data import UNIVERSAL_DEPENDENCIES, UNIVERSAL_POS_TAGS
class TestBertNer(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Seed every random generator used by the library once for the whole
        # test class (see `set_total_seed`) so that test runs are reproducible.
        set_total_seed(0)
def tearDown(self):
if hasattr(self, 'ner'):
del self.ner
if hasattr(self, 'another_ner'):
del self.another_ner
if hasattr(self, 'temp_file_name'):
if os.path.isfile(self.temp_file_name):
os.remove(self.temp_file_name)
def test_creation(self):
self.ner = BERT_NER(udpipe_lang='en')
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.ner, 'use_shapes'))
self.assertTrue(hasattr(self.ner, 'use_nlp_features'))
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertIsInstance(self.ner.batch_size, int)
self.assertIsInstance(self.ner.lstm_units, int)
self.assertIsInstance(self.ner.lr, float)
self.assertIsInstance(self.ner.l2_reg, float)
self.assertIsInstance(self.ner.clip_norm, float)
self.assertIsInstance(self.ner.bert_hub_module_handle, str)
self.assertIsInstance(self.ner.udpipe_lang, str)
self.assertIsInstance(self.ner.finetune_bert, bool)
self.assertIsInstance(self.ner.max_epochs, int)
self.assertIsInstance(self.ner.patience, int)
self.assertIsNone(self.ner.random_seed)
self.assertIsInstance(self.ner.gpu_memory_frac, float)
self.assertIsInstance(self.ner.max_seq_length, int)
self.assertIsInstance(self.ner.validation_fraction, float)
self.assertIsInstance(self.ner.verbose, bool)
self.assertIsInstance(self.ner.use_shapes, bool)
self.assertIsInstance(self.ner.use_nlp_features, bool)
def test_check_params_positive(self):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1', finetune_bert=True,
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.0,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=None,
use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
self.assertTrue(True)
def test_check_params_negative001(self):
true_err_msg = re.escape('`bert_hub_module_handle` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128, use_shapes=True, use_nlp_features=False, udpipe_lang='en'
)
def test_check_params_negative002(self):
true_err_msg = re.escape('`bert_hub_module_handle` is wrong! Expected `{0}`, got `{1}`.'.format(
type('abc'), type(123)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle=1, finetune_bert=True,
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
use_shapes=False, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative003(self):
true_err_msg = re.escape('`batch_size` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative004(self):
true_err_msg = re.escape('`batch_size` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size='32', max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative005(self):
true_err_msg = re.escape('`batch_size` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=-3, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative006(self):
true_err_msg = re.escape('`max_epochs` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative007(self):
true_err_msg = re.escape('`max_epochs` is wrong! Expected `{0}`, got `{1}`.'.format(
type(3), type('3')))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs='10', patience=3, gpu_memory_frac=1.0, verbose=False,
random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative008(self):
true_err_msg = re.escape('`max_epochs` is wrong! Expected a positive integer value, but -3 is not positive.')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=-3, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
def test_check_params_negative009(self):
true_err_msg = re.escape('`patience` is not specified!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
validation_fraction=0.1, max_epochs=10, gpu_memory_frac=1.0, verbose=False, random_seed=42,
lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
    def test_check_params_negative010(self):
        """`patience` of a wrong type (str instead of int) must raise ValueError."""
        true_err_msg = re.escape('`patience` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience='3', gpu_memory_frac=1.0, verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative011(self):
        """A non-positive `patience` (-3) must raise ValueError."""
        true_err_msg = re.escape('`patience` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=-3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative012(self):
        """Omitting `max_seq_length` entirely must raise ValueError."""
        true_err_msg = re.escape('`max_seq_length` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative013(self):
        """`max_seq_length` of a wrong type (str instead of int) must raise ValueError."""
        true_err_msg = re.escape('`max_seq_length` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length='512', lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative014(self):
        """A non-positive `max_seq_length` (-3) must raise ValueError."""
        true_err_msg = re.escape('`max_seq_length` is wrong! Expected a positive integer value, but -3 is not '
                                 'positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=-3, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative015(self):
        """Omitting `validation_fraction` entirely must raise ValueError."""
        true_err_msg = re.escape('`validation_fraction` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative016(self):
        """`validation_fraction` of a wrong type (str instead of float) must raise ValueError."""
        true_err_msg = re.escape('`validation_fraction` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction='0.1', max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative017(self):
        """A negative `validation_fraction` (-0.1) must raise ValueError."""
        # NOTE(review): unlike the sibling tests this pattern is not re.escape'd; its only
        # regex metacharacters are '.', which still match their literal selves, so it works.
        true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value greater than or ' \
                       'equal to 0.0, but {0} is not positive.'.format(-0.1)
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=-0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative018(self):
        """A `validation_fraction` >= 1.0 (here 1.1) must raise ValueError."""
        # NOTE(review): pattern not re.escape'd; only '.' metacharacters, which match literally.
        true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value less than 1.0, but ' \
                       '{0} is not less than 1.0.'.format(1.1)
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=1.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative019(self):
        """Omitting `gpu_memory_frac` entirely must raise ValueError."""
        true_err_msg = re.escape('`gpu_memory_frac` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative020(self):
        """`gpu_memory_frac` of a wrong type (str instead of float) must raise ValueError."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac='1.0', verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative021(self):
        """`gpu_memory_frac` outside (0.0, 1.0] on the low side (-1.0) must raise ValueError."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
                                 'but {0} is not proper.'.format(-1.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=-1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative022(self):
        """`gpu_memory_frac` outside (0.0, 1.0] on the high side (1.3) must raise ValueError."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
                                 'but {0} is not proper.'.format(1.3))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.3, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative023(self):
        """Omitting `lr` entirely must raise ValueError."""
        true_err_msg = re.escape('`lr` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative024(self):
        """`lr` of a wrong type (str instead of float) must raise ValueError."""
        true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative025(self):
        """A non-positive `lr` (0.0) must raise ValueError."""
        true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
                                 'positive.'.format(0.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative026(self):
        """Omitting `lr` entirely must raise ValueError."""
        # NOTE(review): this test is an exact duplicate of test_check_params_negative023;
        # consider removing one of the two copies.
        true_err_msg = re.escape('`lr` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative027(self):
        """`lr` of a wrong type (str instead of float) must raise ValueError."""
        # NOTE(review): exact duplicate of test_check_params_negative024; consider removing one copy.
        true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative028(self):
        """A non-positive `lr` (0.0) must raise ValueError."""
        # NOTE(review): exact duplicate of test_check_params_negative025; consider removing one copy.
        true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
                                 'positive.'.format(0.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative029(self):
        """Omitting `l2_reg` entirely must raise ValueError."""
        true_err_msg = re.escape('`l2_reg` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative030(self):
        """`l2_reg` of a wrong type (str instead of float) must raise ValueError."""
        true_err_msg = re.escape('`l2_reg` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg='1e-4', clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative031(self):
        """A negative `l2_reg` (-2.0) must raise ValueError (zero is allowed, negative is not)."""
        true_err_msg = re.escape('`l2_reg` is wrong! Expected a non-negative floating-point value, but {0} is '
                                 'negative.'.format(-2.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=-2.0, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative032(self):
        """Omitting `finetune_bert` entirely must raise ValueError."""
        true_err_msg = re.escape('`finetune_bert` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, validation_fraction=0.1, clip_norm=5.0,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative033(self):
        """`finetune_bert` of a wrong type (str instead of bool) must raise ValueError."""
        true_err_msg = re.escape('`finetune_bert` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(True), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert='True', batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative034(self):
        """Omitting `verbose` entirely must raise ValueError."""
        true_err_msg = re.escape('`verbose` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative035(self):
        """`verbose` of a wrong type (str instead of bool) must raise ValueError."""
        true_err_msg = re.escape('`verbose` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(True), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose='False',
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative036(self):
        """Omitting `lstm_units` entirely must raise ValueError."""
        true_err_msg = re.escape('`lstm_units` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative037(self):
        """`lstm_units` of a wrong type (str instead of int) must raise ValueError."""
        true_err_msg = re.escape('`lstm_units` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                lstm_units='128', finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4,
                clip_norm=5.0, validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
                random_seed=42, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative038(self):
        """A non-positive `lstm_units` (-3) must raise ValueError."""
        true_err_msg = re.escape('`lstm_units` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=-3, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
def test_check_X_positive(self):
X = ['abc', 'defgh', '4wdffg']
BERT_NER.check_X(X, 'X_train')
self.assertTrue(True)
def test_check_X_negative01(self):
X = {'abc', 'defgh', '4wdffg'}
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative02(self):
X = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`X_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative03(self):
X = ['abc', 23, '4wdffg']
true_err_msg = re.escape('Item 1 of `X_train` is wrong, because it is not string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def text_check_Xy_positive(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_classes_list = ('LOC', 'ORG', 'PER')
self.assertEqual(true_classes_list, BERT_NER.check_Xy(X, 'X_train', y, 'y_train'))
def text_check_Xy_negative01(self):
X = {
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
}
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative02(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = {
'1': {
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
'2': {
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
}
true_err_msg = re.escape('`y_train` is wrong, because it is not a list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative03(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`y_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative04(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
},
{
'LOC': [(17, 24), (117, 130)]
}
]
true_err_msg = re.escape('Length of `X_train` does not correspond to length of `y_train`! 2 != 3')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative05(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
4
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because it is not a dictionary-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative06(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
1: [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its key `1` is not a string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative07(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'O': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `O` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative08(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'123': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `123` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative09(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'loc': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `loc` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative10(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': {1, 2}
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its value `{0}` is not a list-like '
'object!'.format(y[0]['PER']))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative11(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), 63],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `63` are not specified as '
'list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative12(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77, 81)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `{0}` are not specified as '
'2-D list!'.format((63, 77, 81)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative13(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (219, 196)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((219, 196)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative14(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 519)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((196, 519)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative15(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(-1, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((-1, 137)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_detect_token_labels_positive01(self):
        """`detect_token_labels` must project character-level entity spans onto WordPiece tokens.

        Label encoding, as evidenced by `y_true` below: 0 is "outside any
        entity"; for a class with ID k, the first token of an entity is
        labelled 2*k and every continuation token 2*k - 1.  The label sequence
        is shifted one position to the right relative to `tokenized_text`
        (position 0 presumably corresponds to the special [CLS] token — TODO
        confirm against the implementation).
        """
        # source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
        tokenized_text = ['Ба', '##рак', 'Об', '##ама', 'принимает', 'в', 'Б', '##елом', 'доме', 'своего',
                          'французского', 'кол', '##ле', '##гу', 'Н', '##ико', '##ля', 'Са', '##рко', '##зи', '.']
        # Character-level (start, end) bounds of each WordPiece token within the source text.
        token_bounds = [(0, 2), (2, 5), (6, 8), (8, 11), (12, 21), (22, 23), (24, 25), (25, 29), (30, 34), (35, 41),
                        (42, 54), (55, 58), (58, 60), (60, 62), (63, 64), (64, 67), (67, 69), (70, 72), (72, 75),
                        (75, 77), (77, 78)]
        # Per-character entity index: 1 = 'Барак Обама', 2 = 'Белом доме', 3 = 'Николя Саркози'.
        indices_of_named_entities = np.array(
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 3, 0],
            dtype=np.int32
        )
        # Maps each entity index to its class ID (entities 1 and 3 share class 1).
        label_IDs = {1: 1, 2: 2, 3: 1}
        y_true = np.array(
            [0, 2, 1, 1, 1, 0, 0, 4, 3, 3, 0, 0, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        # Last argument (32) is the padded output sequence length.
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_detect_token_labels_positive02(self):
        """`detect_token_labels` must handle many entities, including single-character tokens.

        Same encoding convention as in `test_detect_token_labels_positive01`:
        label 0 is "outside", and for class ID k the first token of an entity
        gets 2*k while continuation tokens get 2*k - 1; the label row is
        shifted one position to the right relative to `tokenized_text`.
        """
        # source_text = 'С 1876 г Павлов ассистирует профессору К. Н. Устимовичу в Медико-хирургической академии и ' \
        #               'параллельно изучает физиологию кровообращения.'
        tokenized_text = ['С', '1876', 'г', 'Павло', '##в', 'а', '##сси', '##сти', '##рует', 'профессор', '##у', 'К',
                          '.', 'Н', '.', 'У', '##сти', '##мов', '##ич', '##у', 'в', 'М', '##еди', '##ко', '-',
                          'х', '##ир', '##ург', '##ической', 'академии', 'и', 'пара', '##лл', '##ельно',
                          'из', '##уч', '##ает', 'ф', '##из', '##ио', '##логи', '##ю',
                          'к', '##рово', '##об', '##ращения', '.']
        # Character-level (start, end) bounds of each WordPiece token within the source text.
        token_bounds = [(0, 1), (2, 6), (7, 8), (9, 14), (14, 15), (16, 17), (17, 20), (20, 23), (23, 27), (28, 37),
                        (37, 38), (39, 40), (40, 41), (42, 43), (43, 44), (45, 46), (46, 49), (49, 52), (52, 54),
                        (54, 55), (56, 57), (58, 59), (59, 62), (62, 64), (64, 65), (65, 66), (66, 68), (68, 71),
                        (71, 78), (79, 87), (88, 89), (90, 94), (94, 96), (96, 101), (102, 104), (104, 106), (106, 109),
                        (110, 111), (111, 113), (113, 115), (115, 119), (119, 120), (121, 122), (122, 126), (126, 128),
                        (128, 135), (135, 136)]
        # Per-character entity index for the five annotated entities of the source text.
        indices_of_named_entities = np.array(
            [0, 0, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
             5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        # Maps each entity index to its class ID (entities 2 and 4 share class 2).
        label_IDs = {1: 1, 2: 2, 3: 3, 4: 2, 5: 4}
        y_true = np.array(
            [0, 0, 2, 1, 4, 3, 0, 0, 0, 0, 6, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 8, 7, 7, 7, 7, 7, 7, 7, 7, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        # Last argument (64) is the padded output sequence length.
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 64)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_detect_token_labels_positive03(self):
        """`detect_token_labels` must label adjacent entities of the same class separately.

        Entities 2 and 3 both map to class 2 and are separated by a single
        unannotated character, so the expected output contains two consecutive
        "begin" labels (value 4) for that class rather than one merged span.
        """
        # source_text = 'Весной 1890 года Варшавский и Томский университеты избирают его профессором.'
        tokenized_text = ['В', '##есной', '1890', 'года', 'В', '##ар', '##ша', '##вский', 'и', 'Томск', '##ий',
                          'университет', '##ы', 'из', '##бира', '##ют', 'его', 'профессором', '.']
        # Character-level (start, end) bounds of each WordPiece token within the source text.
        token_bounds = [(0, 1), (1, 6), (7, 11), (12, 16), (17, 18), (18, 20), (20, 22), (22, 27), (28, 29), (30, 35),
                        (35, 37), (38, 49), (49, 50), (51, 52), (53, 57), (57, 59), (60, 63), (64, 75), (75, 76)]
        # Per-character entity index for the three annotated entities of the source text.
        indices_of_named_entities = np.array(
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0],
            dtype=np.int32
        )
        # Entities 2 and 3 share class 2.
        label_IDs = {1: 1, 2: 2, 3: 2}
        y_true = np.array(
            [0, 2, 1, 1, 1, 4, 3, 3, 3, 3, 4, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        # Last argument (32) is the padded output sequence length.
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_calculate_indices_of_named_entities(self):
        """`calculate_indices_of_named_entities` must build per-character entity indices plus a label map.

        Expected behaviour, as pinned by the data below: every character of the
        source text gets the 1-based index of the entity occurrence covering it
        (0 for uncovered characters), and the returned mapping translates each
        occurrence index into the 1-based position of its class in
        `classes_list` (here both PERSON occurrences map to class 3, LOCATION
        to class 1).
        """
        source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
        classes_list = ('LOCATION', 'ORG', 'PERSON')
        named_entities = {'PERSON': [(0, 11), (63, 77)], 'LOCATION': [(24, 34)]}
        true_indices = np.array(
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 0],
            dtype=np.int32
        )
        true_labels_to_classes = {1: 1, 2: 3, 3: 3}
        indices, labels_to_classes = BERT_NER.calculate_indices_of_named_entities(source_text, classes_list,
                                                                                 named_entities)
        self.assertIsInstance(indices, np.ndarray)
        self.assertIsInstance(labels_to_classes, dict)
        self.assertEqual(true_indices.shape, indices.shape)
        self.assertEqual(true_indices.tolist(), indices.tolist())
        self.assertEqual(set(true_labels_to_classes.keys()), set(labels_to_classes.keys()))
        for label_ID in true_labels_to_classes:
            self.assertEqual(true_labels_to_classes[label_ID], labels_to_classes[label_ID])
def test_fit_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=32, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertTrue(hasattr(res, 'universal_pos_tags_dict_'))
self.assertTrue(hasattr(res, 'universal_dependencies_dict_'))
self.assertTrue(hasattr(res, 'nlp_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
self.assertIsInstance(res.universal_pos_tags_dict_, dict)
self.assertIsInstance(res.universal_dependencies_dict_, dict)
self.assertIsInstance(res.nlp_, UDPipeLanguage)
self.assertEqual(len(res.universal_pos_tags_dict_), len(UNIVERSAL_POS_TAGS))
self.assertEqual(len(res.universal_dependencies_dict_), len(UNIVERSAL_DEPENDENCIES))
def test_fit_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=True, max_epochs=3, batch_size=2, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, lstm_units=32, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertEqual(res.random_seed, 42)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
def test_fit_positive03(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=None, clip_norm=None,
udpipe_lang='ru', use_shapes=False, use_nlp_features=True)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsNone(res.lstm_units)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsNone(res.clip_norm, None)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
def test_fit_predict(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=5, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, udpipe_lang='ru', use_shapes=True,
use_nlp_features=False)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
y_pred = res.predict(X_train)
self.assertIsInstance(y_pred, list)
self.assertEqual(len(X_train), len(y_pred))
for sample_idx in range(len(y_pred)):
self.assertIsInstance(y_pred[sample_idx], dict)
f1, precision, recall, _ = calculate_prediction_quality(y_train, y_pred, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
def test_predict_negative(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, random_seed=None, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
with self.assertRaises(NotFittedError):
_ = self.ner.predict(X_train)
    def test_tokenize_all_01(self):
        """With shape and NLP features enabled, `tokenize_all` must return a 4th input of
        per-token feature vectors.

        Expected feature-vector layout per token, as pinned by the assertions
        below: the first 4 components form a one-hot group (their sum must be
        exactly 1), followed by a one-hot part-of-speech group of
        len(UNIVERSAL_POS_TAGS) components and a dependency group of
        len(UNIVERSAL_DEPENDENCIES) components whose sum is at least 1
        (i.e. possibly multi-hot).
        """
        base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
        self.ner = BERT_NER(finetune_bert=False, max_epochs=1, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
                            validation_fraction=0.3, random_seed=None, udpipe_lang='ru', use_nlp_features=True,
                            use_shapes=True)
        X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
        self.ner.fit(X_train, y_train)
        res = self.ner.tokenize_all(X_train, y_train, shapes_vocabulary=self.ner.shapes_list_)
        self.assertIsInstance(res, tuple)
        self.assertEqual(len(res), 4)
        X_train_tokenized, y_train_tokenized, shapes_list, bounds_of_tokens_for_training = res
        self.assertIsInstance(X_train_tokenized, list)
        self.assertIsInstance(y_train_tokenized, np.ndarray)
        # The supplied shapes vocabulary must be returned as the very same object.
        self.assertIs(self.ner.shapes_list_, shapes_list)
        self.assertIsInstance(bounds_of_tokens_for_training, np.ndarray)
        self.assertEqual(len(X_train_tokenized), 4)
        self.assertEqual(y_train_tokenized.shape, (len(y_train), self.ner.max_seq_length))
        # The first three inputs are 2-D matrices of shape (n_samples, max_seq_length).
        for data_idx in range(3):
            self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
            self.assertEqual(X_train_tokenized[data_idx].shape, (len(X_train), self.ner.max_seq_length))
        # The fourth input is a 3-D tensor of per-token feature vectors.
        data_idx = 3
        self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
        self.assertEqual(len(X_train_tokenized[data_idx].shape), 3)
        self.assertEqual(X_train_tokenized[data_idx].shape[0], len(X_train))
        self.assertEqual(X_train_tokenized[data_idx].shape[1], self.ner.max_seq_length)
        self.assertGreater(X_train_tokenized[data_idx].shape[2],
                           4 + len(UNIVERSAL_POS_TAGS) + len(UNIVERSAL_DEPENDENCIES))
        for sample_idx in range(X_train_tokenized[data_idx].shape[0]):
            # Count the leading tokens with a non-zero feature vector; an all-zeros
            # vector (sum below the tolerance) marks the start of padding.
            n = 0
            for token_idx in range(X_train_tokenized[data_idx].shape[1]):
                if X_train_tokenized[data_idx][sample_idx][token_idx].sum() < 1e-3:
                    break
                n += 1
            self.assertGreater(n, 0, msg='Sample {0}: additional features are not defined!'.format(sample_idx))
            # First 4 components: one-hot group, so they must sum to exactly 1.
            for token_idx in range(n):
                self.assertAlmostEqual(X_train_tokenized[data_idx][sample_idx][token_idx][0:4].sum(), 1.0,
                                       msg='Sample {0}, token {1}: additional features are wrong!'.format(
                                           sample_idx, token_idx))
            # Interior tokens only (the first and last positions are excluded,
            # presumably because they hold the special [CLS]/[SEP] tokens — TODO confirm).
            for token_idx in range(1, n - 1):
                start_pos = 4
                end_pos = 4 + len(UNIVERSAL_POS_TAGS)
                self.assertAlmostEqual(X_train_tokenized[data_idx][sample_idx][token_idx][start_pos:end_pos].sum(), 1.0,
                                       msg='Sample {0}, token {1}: part of speech is not defined!'.format(
                                           sample_idx, token_idx))
                start_pos = 4 + len(UNIVERSAL_POS_TAGS)
                end_pos = 4 + len(UNIVERSAL_POS_TAGS) + len(UNIVERSAL_DEPENDENCIES)
                self.assertGreaterEqual(X_train_tokenized[data_idx][sample_idx][token_idx][start_pos:end_pos].sum(),
                                        1.0,
                                        msg='Sample {0}, token {1}: dependency tag is not defined!'.format(
                                            sample_idx, token_idx))
def test_tokenize_all_02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=1, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, udpipe_lang='ru', use_shapes=False,
use_nlp_features=False)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
self.ner.fit(X_train, y_train)
res = self.ner.tokenize_all(X_train, y_train, shapes_vocabulary=self.ner.shapes_list_)
self.assertIsInstance(res, tuple)
self.assertEqual(len(res), 4)
X_train_tokenized, y_train_tokenized, shapes_list, bounds_of_tokens_for_training = res
self.assertIsInstance(X_train_tokenized, list)
self.assertIsInstance(y_train_tokenized, np.ndarray)
self.assertIs(self.ner.shapes_list_, shapes_list)
self.assertIsInstance(bounds_of_tokens_for_training, np.ndarray)
self.assertEqual(len(X_train_tokenized), 3)
self.assertEqual(y_train_tokenized.shape, (len(y_train), self.ner.max_seq_length))
for data_idx in range(3):
self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
self.assertEqual(X_train_tokenized[data_idx].shape, (len(X_train), self.ner.max_seq_length))
def test_serialize_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=5, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
y_pred1 = res.predict(X_train)
self.assertIsInstance(y_pred1, list)
self.assertEqual(len(X_train), len(y_pred1))
for sample_idx in range(len(y_pred1)):
self.assertIsInstance(y_pred1[sample_idx], dict)
f1, precision, recall, _ = calculate_prediction_quality(y_train, y_pred1, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
with tempfile.NamedTemporaryFile(mode='w', delete=True) as fp:
self.temp_file_name = fp.name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(res, fp)
del res, self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
y_pred2 = self.ner.predict(X_train)
self.assertIsInstance(y_pred2, list)
self.assertEqual(len(y_pred2), len(y_pred2))
for sample_idx in range(len(y_pred2)):
self.assertIsInstance(y_pred2[sample_idx], dict)
self.assertEqual(set(y_pred1[sample_idx]), set(y_pred2[sample_idx]))
for ne_type in y_pred1[sample_idx]:
self.assertEqual(y_pred1[sample_idx][ne_type], y_pred2[sample_idx][ne_type])
def test_serialize_positive02(self):
self.ner = BERT_NER(random_seed=31, udpipe_lang='ru')
old_udpipe_lang = self.ner.udpipe_lang
old_batch_size = self.ner.batch_size
old_lstm_units = self.ner.lstm_units
old_lr = self.ner.lr
old_l2_reg = self.ner.l2_reg
old_clip_norm = self.ner.clip_norm
old_bert_hub_module_handle = self.ner.bert_hub_module_handle
old_finetune_bert = self.ner.finetune_bert
old_max_epochs = self.ner.max_epochs
old_patience = self.ner.patience
old_random_seed = self.ner.random_seed
old_gpu_memory_frac = self.ner.gpu_memory_frac
old_max_seq_length = self.ner.max_seq_length
old_validation_fraction = self.ner.validation_fraction
old_verbose = self.ner.verbose
old_use_shapes = self.ner.use_shapes
old_use_nlp_features = self.ner.use_nlp_features
with tempfile.NamedTemporaryFile(mode='w', delete=True) as fp:
self.temp_file_name = fp.name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(self.ner, fp)
del self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertTrue(hasattr(self.ner, 'use_shapes'))
self.assertTrue(hasattr(self.ner, 'use_nlp_features'))
self.assertEqual(self.ner.udpipe_lang, old_udpipe_lang)
self.assertEqual(self.ner.batch_size, old_batch_size)
self.assertEqual(self.ner.lstm_units, old_lstm_units)
self.assertAlmostEqual(self.ner.lr, old_lr)
self.assertAlmostEqual(self.ner.l2_reg, old_l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, old_clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, old_bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, old_finetune_bert)
self.assertEqual(self.ner.max_epochs, old_max_epochs)
self.assertEqual(self.ner.patience, old_patience)
self.assertAlmostEqual(self.ner.gpu_memory_frac, old_gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, old_max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, old_validation_fraction)
self.assertEqual(self.ner.verbose, old_verbose)
self.assertEqual(self.ner.use_shapes, old_use_shapes)
self.assertEqual(self.ner.use_nlp_features, old_use_nlp_features)
self.assertEqual(self.ner.random_seed, old_random_seed)
def test_copy_positive01(self):
self.ner = BERT_NER(random_seed=0, udpipe_lang='ru', use_shapes=False, use_nlp_features=True)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertTrue(hasattr(self.another_ner, 'use_shapes'))
self.assertTrue(hasattr(self.another_ner, 'use_nlp_features'))
self.assertEqual(self.ner.udpipe_lang, self.another_ner.udpipe_lang)
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
self.assertEqual(self.ner.use_shapes, self.another_ner.use_shapes)
self.assertEqual(self.ner.use_nlp_features, self.another_ner.use_nlp_features)
def test_copy_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
self.ner.fit(X_train, y_train)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertTrue(hasattr(self.another_ner, 'use_shapes'))
self.assertTrue(hasattr(self.another_ner, 'use_nlp_features'))
self.assertTrue(hasattr(self.another_ner, 'classes_list_'))
self.assertTrue(hasattr(self.another_ner, 'shapes_list_'))
self.assertTrue(hasattr(self.another_ner, 'tokenizer_'))
self.assertTrue(hasattr(self.another_ner, 'sess_'))
self.assertEqual(self.ner.udpipe_lang, self.another_ner.udpipe_lang)
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
self.assertEqual(self.ner.use_shapes, self.another_ner.use_shapes)
self.assertEqual(self.ner.use_nlp_features, self.another_ner.use_nlp_features)
self.assertIs(self.ner.classes_list_, self.another_ner.classes_list_)
self.assertIs(self.ner.shapes_list_, self.another_ner.shapes_list_)
self.assertIs(self.ner.tokenizer_, self.another_ner.tokenizer_)
self.assertIs(self.ner.sess_, self.another_ner.sess_)
def test_calculate_bounds_of_named_entities(self):
bounds_of_tokens = [(0, 2), (2, 5), (5, 8), (8, 10), (11, 16), (17, 20), (20, 22), (22, 26), (26, 27), (28, 31),
(31, 34), (34, 37), (38, 48), (49, 52), (52, 54), (55, 57), (58, 59), (59, 61), (61, 63),
(64, 70), (71, 83), (84, 87), (87, 90), (90, 93), (93, 95), (95, 98), (98, 99)]
classes_list = ('LOCATION', 'ORG', 'PERSON')
labels_of_tokens = [0, 0, 2, 1, 1, 2, 1, 0, 0, 0, 4, 3, 0, 6, 5, 5, 5, 0, 5, 5, 0, 2, 2, 3, 3, 6, 5]
true_entities = {
'LOCATION': [(5, 16), (17, 22), (84, 87), (87, 90)],
'ORG': [(31, 37), (90, 95)],
'PERSON': [(49, 59), (61, 70), (95, 99)]
}
calc_entities = BERT_NER.calculate_bounds_of_named_entities(bounds_of_tokens, classes_list, labels_of_tokens)
self.assertIsInstance(calc_entities, dict)
self.assertEqual(set(true_entities.keys()), set(calc_entities.keys()))
for entity_type in true_entities:
self.assertEqual(true_entities[entity_type], calc_entities[entity_type])
def test_get_shape_of_string_positive01(self):
src = '##чники'
dst = 'a'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive02(self):
src = 'уже'
dst = 'a'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive03(self):
src = 'К'
dst = 'A'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive04(self):
src = 'Однако'
dst = 'Aa'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive05(self):
src = '66–67'
dst = 'D-D'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive06(self):
src = '[UNK]'
dst = '[UNK]'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive07(self):
src = '…'
dst = 'U'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive08(self):
src = ','
dst = 'P'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_negative(self):
src = ''
dst = ''
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_subword_ID_positive01(self):
src = '##чники'
dst = 2
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive02(self):
src = 'Однако'
dst = 3
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive03(self):
src = '[CLS]'
dst = 0
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive04(self):
src = '[SEP]'
dst = 1
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
# Allow running this test module directly; verbosity=2 prints each test name as it runs.
if __name__ == '__main__':
    unittest.main(verbosity=2)
import copy
import gc
import os
import pickle
import re
import sys
import tempfile
import unittest
import numpy as np
from sklearn.exceptions import NotFittedError
from spacy_udpipe.language import UDPipeLanguage
try:
from deep_ner.bert_ner import BERT_NER
from deep_ner.utils import load_dataset_from_json, set_total_seed
from deep_ner.quality import calculate_prediction_quality
from deep_ner.udpipe_data import UNIVERSAL_DEPENDENCIES, UNIVERSAL_POS_TAGS
except:
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from deep_ner.bert_ner import BERT_NER
from deep_ner.utils import load_dataset_from_json, set_total_seed
from deep_ner.quality import calculate_prediction_quality
from deep_ner.udpipe_data import UNIVERSAL_DEPENDENCIES, UNIVERSAL_POS_TAGS
class TestBertNer(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # Seed all random generators once for the whole class so test runs are reproducible.
        set_total_seed(0)
def tearDown(self):
if hasattr(self, 'ner'):
del self.ner
if hasattr(self, 'another_ner'):
del self.another_ner
if hasattr(self, 'temp_file_name'):
if os.path.isfile(self.temp_file_name):
os.remove(self.temp_file_name)
def test_creation(self):
self.ner = BERT_NER(udpipe_lang='en')
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.ner, 'use_shapes'))
self.assertTrue(hasattr(self.ner, 'use_nlp_features'))
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertIsInstance(self.ner.batch_size, int)
self.assertIsInstance(self.ner.lstm_units, int)
self.assertIsInstance(self.ner.lr, float)
self.assertIsInstance(self.ner.l2_reg, float)
self.assertIsInstance(self.ner.clip_norm, float)
self.assertIsInstance(self.ner.bert_hub_module_handle, str)
self.assertIsInstance(self.ner.udpipe_lang, str)
self.assertIsInstance(self.ner.finetune_bert, bool)
self.assertIsInstance(self.ner.max_epochs, int)
self.assertIsInstance(self.ner.patience, int)
self.assertIsNone(self.ner.random_seed)
self.assertIsInstance(self.ner.gpu_memory_frac, float)
self.assertIsInstance(self.ner.max_seq_length, int)
self.assertIsInstance(self.ner.validation_fraction, float)
self.assertIsInstance(self.ner.verbose, bool)
self.assertIsInstance(self.ner.use_shapes, bool)
self.assertIsInstance(self.ner.use_nlp_features, bool)
def test_check_params_positive(self):
BERT_NER.check_params(
bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1', finetune_bert=True,
batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.0,
max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=None,
use_shapes=True, use_nlp_features=True, udpipe_lang='en'
)
self.assertTrue(True)
    def test_check_params_negative001(self):
        """Omitting `bert_hub_module_handle` must raise ValueError."""
        true_err_msg = re.escape('`bert_hub_module_handle` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=False, udpipe_lang='en'
            )

    def test_check_params_negative002(self):
        """`bert_hub_module_handle` must be a string, not an int."""
        true_err_msg = re.escape('`bert_hub_module_handle` is wrong! Expected `{0}`, got `{1}`.'.format(
            type('abc'), type(123)))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle=1, finetune_bert=True,
                batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=False, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative003(self):
        """Omitting `batch_size` must raise ValueError."""
        true_err_msg = re.escape('`batch_size` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative004(self):
        """`batch_size` must be an int, not a str."""
        true_err_msg = re.escape('`batch_size` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size='32', max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative005(self):
        """`batch_size` must be positive."""
        true_err_msg = re.escape('`batch_size` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=-3, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative006(self):
        """Omitting `max_epochs` must raise ValueError."""
        true_err_msg = re.escape('`max_epochs` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative007(self):
        """`max_epochs` must be an int, not a str."""
        true_err_msg = re.escape('`max_epochs` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs='10', patience=3, gpu_memory_frac=1.0, verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative008(self):
        """`max_epochs` must be positive."""
        true_err_msg = re.escape('`max_epochs` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=-3, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative009(self):
        """Omitting `patience` must raise ValueError."""
        true_err_msg = re.escape('`patience` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative010(self):
        """`patience` must be an int, not a str."""
        true_err_msg = re.escape('`patience` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience='3', gpu_memory_frac=1.0, verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative011(self):
        """`patience` must be positive."""
        true_err_msg = re.escape('`patience` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=-3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative012(self):
        """Omitting `max_seq_length` must raise ValueError."""
        true_err_msg = re.escape('`max_seq_length` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, lr=1e-3, l2_reg=1e-4, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative013(self):
        """`max_seq_length` must be an int, not a str."""
        true_err_msg = re.escape('`max_seq_length` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length='512', lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative014(self):
        """`max_seq_length` must be positive."""
        true_err_msg = re.escape('`max_seq_length` is wrong! Expected a positive integer value, but -3 is not '
                                 'positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=-3, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative015(self):
        """Omitting `validation_fraction` must raise ValueError."""
        true_err_msg = re.escape('`validation_fraction` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative016(self):
        """`validation_fraction` must be a float, not a str."""
        true_err_msg = re.escape('`validation_fraction` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction='0.1', max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative017(self):
        """`validation_fraction` must be >= 0.0."""
        true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value greater than or ' \
                       'equal to 0.0, but {0} is not positive.'.format(-0.1)
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=-0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative018(self):
        """`validation_fraction` must be less than 1.0."""
        true_err_msg = '`validation_fraction` is wrong! Expected a positive floating-point value less than 1.0, but ' \
                       '{0} is not less than 1.0.'.format(1.1)
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=1.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative019(self):
        """Omitting `gpu_memory_frac` must raise ValueError."""
        true_err_msg = re.escape('`gpu_memory_frac` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative020(self):
        """`gpu_memory_frac` must be a float, not a str."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac='1.0', verbose=False,
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative021(self):
        """`gpu_memory_frac` must lie in (0.0, 1.0]: negative values are rejected."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
                                 'but {0} is not proper.'.format(-1.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=-1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative022(self):
        """`gpu_memory_frac` must lie in (0.0, 1.0]: values above 1.0 are rejected."""
        true_err_msg = re.escape('`gpu_memory_frac` is wrong! Expected a floating-point value in the (0.0, 1.0], '
                                 'but {0} is not proper.'.format(1.3))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.3, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative023(self):
        """Omitting `lr` must raise ValueError."""
        true_err_msg = re.escape('`lr` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative024(self):
        """`lr` must be a float, not a str."""
        true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative025(self):
        """`lr` must be strictly positive."""
        true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
                                 'positive.'.format(0.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative026(self):
        """Verbatim duplicate of test_check_params_negative023 (`lr` omitted).

        NOTE(review): tests 026-028 repeat the `lr` checks of 023-025 exactly; `clip_norm`
        is the only check_params argument without negative coverage in this suite, so these
        were probably meant to exercise `clip_norm` -- confirm against BERT_NER.check_params.
        """
        true_err_msg = re.escape('`lr` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative027(self):
        """Verbatim duplicate of test_check_params_negative024 (see NOTE on 026)."""
        true_err_msg = re.escape('`lr` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr='1e-3', l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative028(self):
        """Verbatim duplicate of test_check_params_negative025 (see NOTE on 026)."""
        true_err_msg = re.escape('`lr` is wrong! Expected a positive floating-point value, but {0} is not '
                                 'positive.'.format(0.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=0.0, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
    def test_check_params_negative029(self):
        """Omitting `l2_reg` must raise ValueError."""
        true_err_msg = re.escape('`l2_reg` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, clip_norm=5.0, validation_fraction=0.1,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative030(self):
        """`l2_reg` must be a float, not a str."""
        true_err_msg = re.escape('`l2_reg` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3.5), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg='1e-4', clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative031(self):
        """`l2_reg` must be non-negative."""
        true_err_msg = re.escape('`l2_reg` is wrong! Expected a non-negative floating-point value, but {0} is '
                                 'negative.'.format(-2.0))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=-2.0, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative032(self):
        """Omitting `finetune_bert` must raise ValueError."""
        true_err_msg = re.escape('`finetune_bert` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, validation_fraction=0.1, clip_norm=5.0,
                max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative033(self):
        """`finetune_bert` must be a bool, not a str."""
        true_err_msg = re.escape('`finetune_bert` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(True), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert='True', batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative034(self):
        """Omitting `verbose` must raise ValueError."""
        true_err_msg = re.escape('`verbose` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, random_seed=42, lstm_units=128,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative035(self):
        """`verbose` must be a bool, not a str."""
        true_err_msg = re.escape('`verbose` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(True), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose='False',
                random_seed=42, lstm_units=128, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative036(self):
        """Omitting `lstm_units` must raise ValueError."""
        true_err_msg = re.escape('`lstm_units` is not specified!')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative037(self):
        """`lstm_units` must be an int, not a str."""
        true_err_msg = re.escape('`lstm_units` is wrong! Expected `{0}`, got `{1}`.'.format(
            type(3), type('3')))
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                lstm_units='128', finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4,
                clip_norm=5.0, validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False,
                random_seed=42, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )

    def test_check_params_negative038(self):
        """`lstm_units` must be positive."""
        true_err_msg = re.escape('`lstm_units` is wrong! Expected a positive integer value, but -3 is not positive.')
        with self.assertRaisesRegex(ValueError, true_err_msg):
            BERT_NER.check_params(
                bert_hub_module_handle='https://tfhub.dev/google/bert_multi_cased_L-12_H-768_A-12/1',
                finetune_bert=True, batch_size=32, max_seq_length=512, lr=1e-3, l2_reg=1e-4, clip_norm=5.0,
                validation_fraction=0.1, max_epochs=10, patience=3, gpu_memory_frac=1.0, verbose=False, random_seed=42,
                lstm_units=-3, use_shapes=True, use_nlp_features=True, udpipe_lang='en'
            )
def test_check_X_positive(self):
X = ['abc', 'defgh', '4wdffg']
BERT_NER.check_X(X, 'X_train')
self.assertTrue(True)
def test_check_X_negative01(self):
X = {'abc', 'defgh', '4wdffg'}
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative02(self):
X = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`X_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def test_check_X_negative03(self):
X = ['abc', 23, '4wdffg']
true_err_msg = re.escape('Item 1 of `X_train` is wrong, because it is not string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_X(X, 'X_train')
def text_check_Xy_positive(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_classes_list = ('LOC', 'ORG', 'PER')
self.assertEqual(true_classes_list, BERT_NER.check_Xy(X, 'X_train', y, 'y_train'))
def text_check_Xy_negative01(self):
X = {
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
}
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('`X_train` is wrong, because it is not list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative02(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = {
'1': {
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
'2': {
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
}
true_err_msg = re.escape('`y_train` is wrong, because it is not a list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative03(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = np.random.uniform(-1.0, 1.0, (10, 2))
true_err_msg = re.escape('`y_train` is wrong, because it is not 1-D list!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative04(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
},
{
'LOC': [(17, 24), (117, 130)]
}
]
true_err_msg = re.escape('Length of `X_train` does not correspond to length of `y_train`! 2 != 3')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative05(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
4
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because it is not a dictionary-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative06(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
1: [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its key `1` is not a string-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative07(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'O': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `O` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative08(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'123': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `123` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative09(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'loc': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because its key `loc` incorrectly specifies a named '
'entity!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative10(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': {1, 2}
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because its value `{0}` is not a list-like '
'object!'.format(y[0]['PER']))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative11(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), 63],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `63` are not specified as '
'list-like object!')
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative12(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77, 81)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 1 of `y_train` is wrong, because named entity bounds `{0}` are not specified as '
'2-D list!'.format((63, 77, 81)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative13(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (219, 196)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((219, 196)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative14(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(122, 137), (196, 519)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((196, 519)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
def text_check_Xy_negative15(self):
X = [
'Встреча с послом Италии в миде Грузии. По инициативе итальянской стороны чрезвычайный и полномочный посол '
'Италии в Грузии Виторио Сандали встретился с заместителем министра иностранных дел Грузии Александром '
'Налбандовым.',
'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози. Как было объявлено, '
'президент Франции прибыл в Вашингтон, чтобы обсудить с главой администрации США ряд насущных проблем, '
'главное место среди которых занимает состояние мировой экономики и безопасность.'
]
y = [
{
'ORG': [(26, 37)],
'PER': [(-1, 137), (196, 219)]
},
{
'ORG': [(126, 135)],
'PER': [(0, 11), (63, 77)],
'LOC': [(24, 34), (161, 178)]
}
]
true_err_msg = re.escape('Item 0 of `y_train` is wrong, because named entity bounds `{0}` are '
'incorrect!'.format((-1, 137)))
with self.assertRaisesRegex(ValueError, true_err_msg):
BERT_NER.check_Xy(X, 'X_train', y, 'y_train')
    def test_detect_token_labels_positive01(self):
        """detect_token_labels must map character-level entity spans onto WordPiece tokens.

        The expected `y_true` encodes, per token, an even ID for the first token of an
        entity and the following odd ID for its continuation tokens, zero-padded to the
        maximum sequence length of 32 (position 0 is reserved for the [CLS] token).
        """
        # WordPiece tokens of the sentence and their character bounds in the source text.
        tokenized_text = ['Ба', '##рак', 'Об', '##ама', 'принимает', 'в', 'Б', '##елом', 'доме', 'своего',
                          'французского', 'кол', '##ле', '##гу', 'Н', '##ико', '##ля', 'Са', '##рко', '##зи', '.']
        token_bounds = [(0, 2), (2, 5), (6, 8), (8, 11), (12, 21), (22, 23), (24, 25), (25, 29), (30, 34), (35, 41),
                        (42, 54), (55, 58), (58, 60), (60, 62), (63, 64), (64, 67), (67, 69), (70, 72), (72, 75),
                        (75, 77), (77, 78)]
        # Per-character entity index (0 = outside any entity) over the source text.
        indices_of_named_entities = np.array(
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 3, 0],
            dtype=np.int32
        )
        # Maps each entity index above to its class label ID.
        label_IDs = {1: 1, 2: 2, 3: 1}
        y_true = np.array(
            [0, 2, 1, 1, 1, 0, 0, 4, 3, 3, 0, 0, 0, 0, 0, 2, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_detect_token_labels_positive02(self):
        """detect_token_labels must handle a longer sentence with several entity classes
        and a maximum sequence length of 64."""
        # WordPiece tokens of the sentence and their character bounds in the source text.
        tokenized_text = ['С', '1876', 'г', 'Павло', '##в', 'а', '##сси', '##сти', '##рует', 'профессор', '##у', 'К',
                          '.', 'Н', '.', 'У', '##сти', '##мов', '##ич', '##у', 'в', 'М', '##еди', '##ко', '-',
                          'х', '##ир', '##ург', '##ической', 'академии', 'и', 'пара', '##лл', '##ельно',
                          'из', '##уч', '##ает', 'ф', '##из', '##ио', '##логи', '##ю',
                          'к', '##рово', '##об', '##ращения', '.']
        token_bounds = [(0, 1), (2, 6), (7, 8), (9, 14), (14, 15), (16, 17), (17, 20), (20, 23), (23, 27), (28, 37),
                        (37, 38), (39, 40), (40, 41), (42, 43), (43, 44), (45, 46), (46, 49), (49, 52), (52, 54),
                        (54, 55), (56, 57), (58, 59), (59, 62), (62, 64), (64, 65), (65, 66), (66, 68), (68, 71),
                        (71, 78), (79, 87), (88, 89), (90, 94), (94, 96), (96, 101), (102, 104), (104, 106), (106, 109),
                        (110, 111), (111, 113), (113, 115), (115, 119), (119, 120), (121, 122), (122, 126), (126, 128),
                        (128, 135), (135, 136)]
        # Per-character entity index (0 = outside any entity) over the source text.
        indices_of_named_entities = np.array(
            [0, 0, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 0, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 0, 0, 0, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
             5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        # Maps each entity index above to its class label ID (classes may repeat).
        label_IDs = {1: 1, 2: 2, 3: 3, 4: 2, 5: 4}
        y_true = np.array(
            [0, 0, 2, 1, 4, 3, 0, 0, 0, 0, 6, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 0, 8, 7, 7, 7, 7, 7, 7, 7, 7, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 64)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_detect_token_labels_positive03(self):
        """detect_token_labels must split adjacent same-class entities into separate
        begin/continuation label runs (maximum sequence length 32)."""
        # WordPiece tokens of the sentence and their character bounds in the source text.
        tokenized_text = ['В', '##есной', '1890', 'года', 'В', '##ар', '##ша', '##вский', 'и', 'Томск', '##ий',
                          'университет', '##ы', 'из', '##бира', '##ют', 'его', 'профессором', '.']
        token_bounds = [(0, 1), (1, 6), (7, 11), (12, 16), (17, 18), (18, 20), (20, 22), (22, 27), (28, 29), (30, 35),
                        (35, 37), (38, 49), (49, 50), (51, 52), (53, 57), (57, 59), (60, 63), (64, 75), (75, 76)]
        # Per-character entity index (0 = outside any entity) over the source text.
        indices_of_named_entities = np.array(
            [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0],
            dtype=np.int32
        )
        # Entities 2 and 3 share class 2 but must start new label runs in `y_true`.
        label_IDs = {1: 1, 2: 2, 3: 2}
        y_true = np.array(
            [0, 2, 1, 1, 1, 4, 3, 3, 3, 3, 4, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            dtype=np.int32
        )
        y_pred = BERT_NER.detect_token_labels(tokenized_text, token_bounds, indices_of_named_entities, label_IDs, 32)
        self.assertIsInstance(y_pred, np.ndarray)
        self.assertEqual(y_true.shape, y_pred.shape)
        self.assertEqual(y_true.tolist(), y_pred.tolist())
    def test_calculate_indices_of_named_entities(self):
        """calculate_indices_of_named_entities must build a per-character entity-index map
        and a dictionary from entity index to 1-based class position in `classes_list`."""
        source_text = 'Барак Обама принимает в Белом доме своего французского коллегу Николя Саркози.'
        classes_list = ('LOCATION', 'ORG', 'PERSON')
        named_entities = {'PERSON': [(0, 11), (63, 77)], 'LOCATION': [(24, 34)]}
        # One entry per character of `source_text`; 0 marks characters outside any entity.
        true_indices = np.array(
            [2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0,
             0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 3, 3, 3, 3, 3, 3,
             3, 3, 3, 3, 3, 0],
            dtype=np.int32
        )
        # Entity index -> 1-based position of its class in `classes_list` (3 == PERSON).
        true_labels_to_classes = {1: 1, 2: 3, 3: 3}
        indices, labels_to_classes = BERT_NER.calculate_indices_of_named_entities(source_text, classes_list,
                                                                                 named_entities)
        self.assertIsInstance(indices, np.ndarray)
        self.assertIsInstance(labels_to_classes, dict)
        self.assertEqual(true_indices.shape, indices.shape)
        self.assertEqual(true_indices.tolist(), indices.tolist())
        self.assertEqual(set(true_labels_to_classes.keys()), set(labels_to_classes.keys()))
        for label_ID in true_labels_to_classes:
            self.assertEqual(true_labels_to_classes[label_ID], labels_to_classes[label_ID])
def test_fit_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=32, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertTrue(hasattr(res, 'universal_pos_tags_dict_'))
self.assertTrue(hasattr(res, 'universal_dependencies_dict_'))
self.assertTrue(hasattr(res, 'nlp_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
self.assertIsInstance(res.universal_pos_tags_dict_, dict)
self.assertIsInstance(res.universal_dependencies_dict_, dict)
self.assertIsInstance(res.nlp_, UDPipeLanguage)
self.assertEqual(len(res.universal_pos_tags_dict_), len(UNIVERSAL_POS_TAGS))
self.assertEqual(len(res.universal_dependencies_dict_), len(UNIVERSAL_DEPENDENCIES))
def test_fit_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=True, max_epochs=3, batch_size=2, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, lstm_units=32, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertEqual(res.random_seed, 42)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
def test_fit_positive03(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, lstm_units=None, clip_norm=None,
udpipe_lang='ru', use_shapes=False, use_nlp_features=True)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsNone(res.lstm_units)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsNone(res.clip_norm, None)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
def test_fit_predict(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=5, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, udpipe_lang='ru', use_shapes=True,
use_nlp_features=False)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
y_pred = res.predict(X_train)
self.assertIsInstance(y_pred, list)
self.assertEqual(len(X_train), len(y_pred))
for sample_idx in range(len(y_pred)):
self.assertIsInstance(y_pred[sample_idx], dict)
f1, precision, recall, _ = calculate_prediction_quality(y_train, y_pred, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
def test_predict_negative(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, random_seed=None, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
with self.assertRaises(NotFittedError):
_ = self.ner.predict(X_train)
    def test_tokenize_all_01(self):
        """With shapes and NLP features enabled, `tokenize_all` must return a 4-tuple:
        a list of four input matrices (three base BERT inputs plus a 3-D matrix of
        additional per-token features), the tokenized labels, the shapes vocabulary,
        and token bounds. The additional-features matrix is checked token by token.
        """
        base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
        self.ner = BERT_NER(finetune_bert=False, max_epochs=1, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
                            validation_fraction=0.3, random_seed=None, udpipe_lang='ru', use_nlp_features=True,
                            use_shapes=True)
        X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
        self.ner.fit(X_train, y_train)
        res = self.ner.tokenize_all(X_train, y_train, shapes_vocabulary=self.ner.shapes_list_)
        self.assertIsInstance(res, tuple)
        self.assertEqual(len(res), 4)
        X_train_tokenized, y_train_tokenized, shapes_list, bounds_of_tokens_for_training = res
        self.assertIsInstance(X_train_tokenized, list)
        self.assertIsInstance(y_train_tokenized, np.ndarray)
        # The shapes vocabulary passed in must be returned by identity, not copied.
        self.assertIs(self.ner.shapes_list_, shapes_list)
        self.assertIsInstance(bounds_of_tokens_for_training, np.ndarray)
        self.assertEqual(len(X_train_tokenized), 4)
        self.assertEqual(y_train_tokenized.shape, (len(y_train), self.ner.max_seq_length))
        # The first three matrices are the standard 2-D BERT inputs.
        for data_idx in range(3):
            self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
            self.assertEqual(X_train_tokenized[data_idx].shape, (len(X_train), self.ner.max_seq_length))
        # The fourth matrix carries additional per-token features and is 3-D:
        # (samples, tokens, feature vector).
        data_idx = 3
        self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
        self.assertEqual(len(X_train_tokenized[data_idx].shape), 3)
        self.assertEqual(X_train_tokenized[data_idx].shape[0], len(X_train))
        self.assertEqual(X_train_tokenized[data_idx].shape[1], self.ner.max_seq_length)
        self.assertGreater(X_train_tokenized[data_idx].shape[2],
                           4 + len(UNIVERSAL_POS_TAGS) + len(UNIVERSAL_DEPENDENCIES))
        for sample_idx in range(X_train_tokenized[data_idx].shape[0]):
            # Count real tokens: the feature vector of a padding token is all-zero.
            n = 0
            for token_idx in range(X_train_tokenized[data_idx].shape[1]):
                if X_train_tokenized[data_idx][sample_idx][token_idx].sum() < 1e-3:
                    break
                n += 1
            self.assertGreater(n, 0, msg='Sample {0}: additional features are not defined!'.format(sample_idx))
            # The first 4 feature components are a one-hot token-kind indicator.
            for token_idx in range(n):
                self.assertAlmostEqual(X_train_tokenized[data_idx][sample_idx][token_idx][0:4].sum(), 1.0,
                                       msg='Sample {0}, token {1}: additional features are wrong!'.format(
                                           sample_idx, token_idx))
            # Inner tokens (excluding the first and last) must carry exactly one POS tag
            # and at least one dependency tag in the corresponding feature slices.
            for token_idx in range(1, n - 1):
                start_pos = 4
                end_pos = 4 + len(UNIVERSAL_POS_TAGS)
                self.assertAlmostEqual(X_train_tokenized[data_idx][sample_idx][token_idx][start_pos:end_pos].sum(), 1.0,
                                       msg='Sample {0}, token {1}: part of speech is not defined!'.format(
                                           sample_idx, token_idx))
                start_pos = 4 + len(UNIVERSAL_POS_TAGS)
                end_pos = 4 + len(UNIVERSAL_POS_TAGS) + len(UNIVERSAL_DEPENDENCIES)
                self.assertGreaterEqual(X_train_tokenized[data_idx][sample_idx][token_idx][start_pos:end_pos].sum(),
                                        1.0,
                                        msg='Sample {0}, token {1}: dependency tag is not defined!'.format(
                                            sample_idx, token_idx))
def test_tokenize_all_02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=1, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, udpipe_lang='ru', use_shapes=False,
use_nlp_features=False)
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
self.ner.fit(X_train, y_train)
res = self.ner.tokenize_all(X_train, y_train, shapes_vocabulary=self.ner.shapes_list_)
self.assertIsInstance(res, tuple)
self.assertEqual(len(res), 4)
X_train_tokenized, y_train_tokenized, shapes_list, bounds_of_tokens_for_training = res
self.assertIsInstance(X_train_tokenized, list)
self.assertIsInstance(y_train_tokenized, np.ndarray)
self.assertIs(self.ner.shapes_list_, shapes_list)
self.assertIsInstance(bounds_of_tokens_for_training, np.ndarray)
self.assertEqual(len(X_train_tokenized), 3)
self.assertEqual(y_train_tokenized.shape, (len(y_train), self.ner.max_seq_length))
for data_idx in range(3):
self.assertIsInstance(X_train_tokenized[data_idx], np.ndarray)
self.assertEqual(X_train_tokenized[data_idx].shape, (len(X_train), self.ner.max_seq_length))
def test_serialize_positive01(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=5, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=42, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
res = self.ner.fit(X_train, y_train)
self.assertIsInstance(res, BERT_NER)
self.assertTrue(hasattr(res, 'udpipe_lang'))
self.assertTrue(hasattr(res, 'batch_size'))
self.assertTrue(hasattr(res, 'lstm_units'))
self.assertTrue(hasattr(res, 'lr'))
self.assertTrue(hasattr(res, 'l2_reg'))
self.assertTrue(hasattr(res, 'clip_norm'))
self.assertTrue(hasattr(res, 'bert_hub_module_handle'))
self.assertTrue(hasattr(res, 'finetune_bert'))
self.assertTrue(hasattr(res, 'max_epochs'))
self.assertTrue(hasattr(res, 'patience'))
self.assertTrue(hasattr(res, 'random_seed'))
self.assertTrue(hasattr(res, 'gpu_memory_frac'))
self.assertTrue(hasattr(res, 'max_seq_length'))
self.assertTrue(hasattr(res, 'validation_fraction'))
self.assertTrue(hasattr(res, 'verbose'))
self.assertTrue(hasattr(res, 'use_shapes'))
self.assertTrue(hasattr(res, 'use_nlp_features'))
self.assertIsInstance(res.udpipe_lang, str)
self.assertIsInstance(res.batch_size, int)
self.assertIsInstance(res.lstm_units, int)
self.assertIsInstance(res.lr, float)
self.assertIsInstance(res.l2_reg, float)
self.assertIsInstance(res.clip_norm, float)
self.assertIsInstance(res.bert_hub_module_handle, str)
self.assertIsInstance(res.finetune_bert, bool)
self.assertIsInstance(res.max_epochs, int)
self.assertIsInstance(res.patience, int)
self.assertIsInstance(res.random_seed, int)
self.assertIsInstance(res.gpu_memory_frac, float)
self.assertIsInstance(res.max_seq_length, int)
self.assertIsInstance(res.validation_fraction, float)
self.assertIsInstance(res.verbose, bool)
self.assertIsInstance(res.use_shapes, bool)
self.assertIsInstance(res.use_nlp_features, bool)
self.assertTrue(hasattr(res, 'classes_list_'))
self.assertTrue(hasattr(res, 'shapes_list_'))
self.assertTrue(hasattr(res, 'tokenizer_'))
self.assertTrue(hasattr(res, 'sess_'))
self.assertEqual(res.classes_list_, ('LOCATION', 'ORG', 'PERSON'))
self.assertIsInstance(res.shapes_list_, tuple)
self.assertGreater(len(res.shapes_list_), 3)
self.assertEqual(res.shapes_list_[-3:], ('[CLS]', '[SEP]', '[UNK]'))
y_pred1 = res.predict(X_train)
self.assertIsInstance(y_pred1, list)
self.assertEqual(len(X_train), len(y_pred1))
for sample_idx in range(len(y_pred1)):
self.assertIsInstance(y_pred1[sample_idx], dict)
f1, precision, recall, _ = calculate_prediction_quality(y_train, y_pred1, res.classes_list_)
self.assertGreater(f1, 0.0)
self.assertGreater(precision, 0.0)
self.assertGreater(recall, 0.0)
with tempfile.NamedTemporaryFile(mode='w', delete=True) as fp:
self.temp_file_name = fp.name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(res, fp)
del res, self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
y_pred2 = self.ner.predict(X_train)
self.assertIsInstance(y_pred2, list)
self.assertEqual(len(y_pred2), len(y_pred2))
for sample_idx in range(len(y_pred2)):
self.assertIsInstance(y_pred2[sample_idx], dict)
self.assertEqual(set(y_pred1[sample_idx]), set(y_pred2[sample_idx]))
for ne_type in y_pred1[sample_idx]:
self.assertEqual(y_pred1[sample_idx][ne_type], y_pred2[sample_idx][ne_type])
def test_serialize_positive02(self):
self.ner = BERT_NER(random_seed=31, udpipe_lang='ru')
old_udpipe_lang = self.ner.udpipe_lang
old_batch_size = self.ner.batch_size
old_lstm_units = self.ner.lstm_units
old_lr = self.ner.lr
old_l2_reg = self.ner.l2_reg
old_clip_norm = self.ner.clip_norm
old_bert_hub_module_handle = self.ner.bert_hub_module_handle
old_finetune_bert = self.ner.finetune_bert
old_max_epochs = self.ner.max_epochs
old_patience = self.ner.patience
old_random_seed = self.ner.random_seed
old_gpu_memory_frac = self.ner.gpu_memory_frac
old_max_seq_length = self.ner.max_seq_length
old_validation_fraction = self.ner.validation_fraction
old_verbose = self.ner.verbose
old_use_shapes = self.ner.use_shapes
old_use_nlp_features = self.ner.use_nlp_features
with tempfile.NamedTemporaryFile(mode='w', delete=True) as fp:
self.temp_file_name = fp.name
with open(self.temp_file_name, mode='wb') as fp:
pickle.dump(self.ner, fp)
del self.ner
gc.collect()
with open(self.temp_file_name, mode='rb') as fp:
self.ner = pickle.load(fp)
self.assertIsInstance(self.ner, BERT_NER)
self.assertTrue(hasattr(self.ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.ner, 'batch_size'))
self.assertTrue(hasattr(self.ner, 'lstm_units'))
self.assertTrue(hasattr(self.ner, 'lr'))
self.assertTrue(hasattr(self.ner, 'l2_reg'))
self.assertTrue(hasattr(self.ner, 'clip_norm'))
self.assertTrue(hasattr(self.ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.ner, 'finetune_bert'))
self.assertTrue(hasattr(self.ner, 'max_epochs'))
self.assertTrue(hasattr(self.ner, 'patience'))
self.assertTrue(hasattr(self.ner, 'random_seed'))
self.assertTrue(hasattr(self.ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.ner, 'max_seq_length'))
self.assertTrue(hasattr(self.ner, 'validation_fraction'))
self.assertTrue(hasattr(self.ner, 'verbose'))
self.assertTrue(hasattr(self.ner, 'use_shapes'))
self.assertTrue(hasattr(self.ner, 'use_nlp_features'))
self.assertEqual(self.ner.udpipe_lang, old_udpipe_lang)
self.assertEqual(self.ner.batch_size, old_batch_size)
self.assertEqual(self.ner.lstm_units, old_lstm_units)
self.assertAlmostEqual(self.ner.lr, old_lr)
self.assertAlmostEqual(self.ner.l2_reg, old_l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, old_clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, old_bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, old_finetune_bert)
self.assertEqual(self.ner.max_epochs, old_max_epochs)
self.assertEqual(self.ner.patience, old_patience)
self.assertAlmostEqual(self.ner.gpu_memory_frac, old_gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, old_max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, old_validation_fraction)
self.assertEqual(self.ner.verbose, old_verbose)
self.assertEqual(self.ner.use_shapes, old_use_shapes)
self.assertEqual(self.ner.use_nlp_features, old_use_nlp_features)
self.assertEqual(self.ner.random_seed, old_random_seed)
def test_copy_positive01(self):
self.ner = BERT_NER(random_seed=0, udpipe_lang='ru', use_shapes=False, use_nlp_features=True)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertTrue(hasattr(self.another_ner, 'use_shapes'))
self.assertTrue(hasattr(self.another_ner, 'use_nlp_features'))
self.assertEqual(self.ner.udpipe_lang, self.another_ner.udpipe_lang)
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
self.assertEqual(self.ner.use_shapes, self.another_ner.use_shapes)
self.assertEqual(self.ner.use_nlp_features, self.another_ner.use_nlp_features)
def test_copy_positive02(self):
base_dir = os.path.join(os.path.dirname(__file__), 'testdata')
self.ner = BERT_NER(finetune_bert=False, max_epochs=3, batch_size=4, max_seq_length=128, gpu_memory_frac=0.9,
validation_fraction=0.3, random_seed=None, udpipe_lang='ru')
X_train, y_train = load_dataset_from_json(os.path.join(base_dir, 'true_named_entities.json'))
self.ner.fit(X_train, y_train)
self.another_ner = copy.copy(self.ner)
self.assertIsInstance(self.another_ner, BERT_NER)
self.assertIsNot(self.ner, self.another_ner)
self.assertTrue(hasattr(self.another_ner, 'udpipe_lang'))
self.assertTrue(hasattr(self.another_ner, 'batch_size'))
self.assertTrue(hasattr(self.another_ner, 'lstm_units'))
self.assertTrue(hasattr(self.another_ner, 'lr'))
self.assertTrue(hasattr(self.another_ner, 'l2_reg'))
self.assertTrue(hasattr(self.another_ner, 'clip_norm'))
self.assertTrue(hasattr(self.another_ner, 'bert_hub_module_handle'))
self.assertTrue(hasattr(self.another_ner, 'finetune_bert'))
self.assertTrue(hasattr(self.another_ner, 'max_epochs'))
self.assertTrue(hasattr(self.another_ner, 'patience'))
self.assertTrue(hasattr(self.another_ner, 'random_seed'))
self.assertTrue(hasattr(self.another_ner, 'gpu_memory_frac'))
self.assertTrue(hasattr(self.another_ner, 'max_seq_length'))
self.assertTrue(hasattr(self.another_ner, 'validation_fraction'))
self.assertTrue(hasattr(self.another_ner, 'verbose'))
self.assertTrue(hasattr(self.another_ner, 'use_shapes'))
self.assertTrue(hasattr(self.another_ner, 'use_nlp_features'))
self.assertTrue(hasattr(self.another_ner, 'classes_list_'))
self.assertTrue(hasattr(self.another_ner, 'shapes_list_'))
self.assertTrue(hasattr(self.another_ner, 'tokenizer_'))
self.assertTrue(hasattr(self.another_ner, 'sess_'))
self.assertEqual(self.ner.udpipe_lang, self.another_ner.udpipe_lang)
self.assertEqual(self.ner.batch_size, self.another_ner.batch_size)
self.assertEqual(self.ner.lstm_units, self.another_ner.lstm_units)
self.assertAlmostEqual(self.ner.lr, self.another_ner.lr)
self.assertAlmostEqual(self.ner.l2_reg, self.another_ner.l2_reg)
self.assertAlmostEqual(self.ner.clip_norm, self.another_ner.clip_norm)
self.assertEqual(self.ner.bert_hub_module_handle, self.another_ner.bert_hub_module_handle)
self.assertEqual(self.ner.finetune_bert, self.another_ner.finetune_bert)
self.assertEqual(self.ner.max_epochs, self.another_ner.max_epochs)
self.assertEqual(self.ner.patience, self.another_ner.patience)
self.assertEqual(self.ner.random_seed, self.another_ner.random_seed)
self.assertAlmostEqual(self.ner.gpu_memory_frac, self.another_ner.gpu_memory_frac)
self.assertEqual(self.ner.max_seq_length, self.another_ner.max_seq_length)
self.assertAlmostEqual(self.ner.validation_fraction, self.another_ner.validation_fraction)
self.assertEqual(self.ner.verbose, self.another_ner.verbose)
self.assertEqual(self.ner.use_shapes, self.another_ner.use_shapes)
self.assertEqual(self.ner.use_nlp_features, self.another_ner.use_nlp_features)
self.assertIs(self.ner.classes_list_, self.another_ner.classes_list_)
self.assertIs(self.ner.shapes_list_, self.another_ner.shapes_list_)
self.assertIs(self.ner.tokenizer_, self.another_ner.tokenizer_)
self.assertIs(self.ner.sess_, self.another_ner.sess_)
def test_calculate_bounds_of_named_entities(self):
bounds_of_tokens = [(0, 2), (2, 5), (5, 8), (8, 10), (11, 16), (17, 20), (20, 22), (22, 26), (26, 27), (28, 31),
(31, 34), (34, 37), (38, 48), (49, 52), (52, 54), (55, 57), (58, 59), (59, 61), (61, 63),
(64, 70), (71, 83), (84, 87), (87, 90), (90, 93), (93, 95), (95, 98), (98, 99)]
classes_list = ('LOCATION', 'ORG', 'PERSON')
labels_of_tokens = [0, 0, 2, 1, 1, 2, 1, 0, 0, 0, 4, 3, 0, 6, 5, 5, 5, 0, 5, 5, 0, 2, 2, 3, 3, 6, 5]
true_entities = {
'LOCATION': [(5, 16), (17, 22), (84, 87), (87, 90)],
'ORG': [(31, 37), (90, 95)],
'PERSON': [(49, 59), (61, 70), (95, 99)]
}
calc_entities = BERT_NER.calculate_bounds_of_named_entities(bounds_of_tokens, classes_list, labels_of_tokens)
self.assertIsInstance(calc_entities, dict)
self.assertEqual(set(true_entities.keys()), set(calc_entities.keys()))
for entity_type in true_entities:
self.assertEqual(true_entities[entity_type], calc_entities[entity_type])
def test_get_shape_of_string_positive01(self):
src = '##чники'
dst = 'a'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive02(self):
src = 'уже'
dst = 'a'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive03(self):
src = 'К'
dst = 'A'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive04(self):
src = 'Однако'
dst = 'Aa'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive05(self):
src = '66–67'
dst = 'D-D'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive06(self):
src = '[UNK]'
dst = '[UNK]'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive07(self):
src = '…'
dst = 'U'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_positive08(self):
src = ','
dst = 'P'
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_shape_of_string_negative(self):
src = ''
dst = ''
self.assertEqual(dst, BERT_NER.get_shape_of_string(src))
def test_get_subword_ID_positive01(self):
src = '##чники'
dst = 2
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive02(self):
src = 'Однако'
dst = 3
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive03(self):
src = '[CLS]'
dst = 0
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
def test_get_subword_ID_positive04(self):
src = '[SEP]'
dst = 1
self.assertEqual(dst, BERT_NER.get_subword_ID(src))
# Allow running this test module directly; verbosity=2 prints one line per test.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| true | true |
1c384a7676714a112d2709b7de2e06ce6bb4ddd8 | 856 | py | Python | tests/test_packages/test_connections/test_tcp/__init__.py | ejfitzgerald/agents-aea | 6411fcba8af2cdf55a3005939ae8129df92e8c3e | [
"Apache-2.0"
] | 126 | 2019-09-07T09:32:44.000Z | 2022-03-29T14:28:41.000Z | tests/test_packages/test_connections/test_tcp/__init__.py | ejfitzgerald/agents-aea | 6411fcba8af2cdf55a3005939ae8129df92e8c3e | [
"Apache-2.0"
] | 1,814 | 2019-08-24T10:08:07.000Z | 2022-03-31T14:28:36.000Z | tests/test_packages/test_connections/test_tcp/__init__.py | ejfitzgerald/agents-aea | 6411fcba8af2cdf55a3005939ae8129df92e8c3e | [
"Apache-2.0"
] | 46 | 2019-09-03T22:13:58.000Z | 2022-03-22T01:25:16.000Z | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This module contains the tests of the TCP connection."""
| 40.761905 | 80 | 0.58528 | true | true | |
1c384c91f36d0a8c490fb7657c500258ebef08fc | 2,882 | py | Python | utils/common.py | projekgallus/gallus-gui | c7effdcbc68d64d60a96b68a1064d0f590b449b8 | [
"BSD-3-Clause"
] | null | null | null | utils/common.py | projekgallus/gallus-gui | c7effdcbc68d64d60a96b68a1064d0f590b449b8 | [
"BSD-3-Clause"
] | null | null | null | utils/common.py | projekgallus/gallus-gui | c7effdcbc68d64d60a96b68a1064d0f590b449b8 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
## Copyright (c) 2018, Projek Gallus
## Copyright (c) 2017, The Sumokoin Project (www.sumokoin.org)
'''
Misc utility classes/functions for application
'''
import os, sys, string
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class DummyStream:
    """A no-op stand-in for a file-like stream: accepts the usual calls and discards them."""

    def __init__(self):
        pass

    def write(self, data):
        pass

    def read(self, data):
        pass

    def flush(self):
        pass

    def close(self):
        pass
def getAppPath():
    '''Get the path to this script no matter how it's run.'''
    # Determine if the application is a py/pyw or a frozen exe.
    if hasattr(sys, 'frozen'):
        # Run from a frozen executable: use the executable's directory.
        dir_path = os.path.dirname(unicode(sys.executable, sys.getfilesystemencoding()))
    elif '__file__' in globals():
        # Run from a .py module: use this module's directory.
        # Bug fix: the old test used locals(), which inside a function can never
        # contain the module-level __file__, so this branch was unreachable.
        dir_path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
    else:
        # Run from an interactive session: fall back to the current working directory.
        dir_path = os.getcwdu()
    return dir_path
def getHomeDir():
    """Return the base directory for application data.

    On Windows this is the machine-wide application-data folder
    (e.g. 'C:\\ProgramData'); elsewhere it is the user's home directory.
    """
    if sys.platform != 'win32':
        return os.path.expanduser("~")
    import winpaths
    return winpaths.get_common_appdata()
def getSockDir():
    """Return the per-user application-data directory (used for socket files).

    On Windows this is the roaming per-user AppData folder
    (e.g. 'C:\\Users\\<user>\\AppData\\Roaming'); elsewhere the home directory.
    """
    if sys.platform != 'win32':
        return os.path.expanduser("~")
    import winpaths
    return winpaths.get_appdata()
def makeDir(d):
    """Create directory ``d`` (with intermediate parents) if missing; return the path."""
    if os.path.exists(d):
        return d
    os.makedirs(d)
    return d
def ensureDir(f):
    """Make sure the parent directory of file path ``f`` exists; return ``f``."""
    parent_dir = os.path.dirname(f)
    if not os.path.exists(parent_dir):
        os.makedirs(parent_dir)
    return f
def _xorData(data):
"""Xor Method, Take a data Xor all bytes and return"""
data = [chr(ord(c) ^ 10) for c in data]
return string.join(data, '')
def readFile(path, offset=0, size=-1, xor_data=False):
    """Read ``size`` bytes from ``path`` starting at ``offset``.

    When ``xor_data`` is true the block is de-obfuscated with the XOR scheme
    used by writeFile(). Using ``with`` guarantees the handle is closed even
    if the read raises (the old code leaked it on error).
    """
    with open(path, 'rb') as fd:
        fd.seek(offset)
        data = fd.read(size)
    return _xorData(data) if xor_data else data
def writeFile(path, buf, offset=0, xor_data=False):
    """Write ``buf`` into ``path`` at ``offset``; return the length written.

    When ``xor_data`` is true the buffer is obfuscated with the XOR scheme
    first. Using ``with`` guarantees the handle is closed (and data flushed)
    even if the write raises (the old code leaked it on error).
    """
    if xor_data:
        buf = _xorData(buf)
    with open(path, 'wb') as fd:
        fd.seek(offset)
        fd.write(buf)
    return len(buf)
def print_money(amount):
    """Format an atomic-unit amount as GAC with 9 decimals and an HTML unit suffix.

    ``amount`` is coerced with int(), so numeric strings and floats (truncated)
    are accepted. Raises Exception when the value cannot be interpreted.
    """
    try:
        amount = int(amount)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures mean bad input.
        raise Exception("Error parsing amount. Money amount must be an integer.")
    return "%s <small>GAC</small>" % ("{:,.9f}".format(amount/1000000000.))
def print_money2(amount):
    """Format an atomic-unit amount with 9 decimals (no unit suffix).

    Same conversion rules as print_money(); raises Exception on bad input.
    """
    try:
        amount = int(amount)
    except (TypeError, ValueError):
        # Narrowed from a bare except: only conversion failures mean bad input.
        raise Exception("Error parsing amount. Money amount must be an integer.")
    return "%s" % ("{:,.9f}".format(amount/1000000000.))
| 27.711538 | 88 | 0.62526 |
StringIO import StringIO
class DummyStream:
def __init__(self): pass
def write(self,data): pass
def read(self,data): pass
def flush(self): pass
def close(self): pass
def getAppPath():
if hasattr(sys, 'frozen'):
dir_path = os.path.dirname(unicode(sys.executable, sys.getfilesystemencoding()))
elif '__file__' in locals():
dir_path = os.path.dirname(unicode(__file__, sys.getfilesystemencoding()))
else:
dir_path = os.getcwdu()
return dir_path
def getHomeDir():
if sys.platform == 'win32':
import winpaths
homedir = winpaths.get_common_appdata()
else:
homedir = os.path.expanduser("~")
return homedir
def getSockDir():
if sys.platform == 'win32':
import winpaths
homedir = winpaths.get_appdata()
else:
homedir = os.path.expanduser("~")
return homedir
def makeDir(d):
if not os.path.exists(d):
os.makedirs(d)
return d
def ensureDir(f):
d = os.path.dirname(f)
if not os.path.exists(d):
os.makedirs(d)
return f
def _xorData(data):
data = [chr(ord(c) ^ 10) for c in data]
return string.join(data, '')
def readFile(path, offset=0, size=-1, xor_data=False):
fd = open(path, 'rb')
fd.seek(offset)
data = fd.read(size)
fd.close()
return _xorData(data) if xor_data else data
def writeFile(path, buf, offset=0, xor_data=False):
if xor_data:
buf = _xorData(buf)
fd = open(path, 'wb')
fd.seek(offset)
fd.write(buf)
fd.close()
return len(buf)
def print_money(amount):
try:
amount = int(amount)
except:
raise Exception("Error parsing amount. Money amount must be an integer.")
return "%s <small>GAC</small>" % ("{:,.9f}".format(amount/1000000000.))
def print_money2(amount):
try:
amount = int(amount)
except:
raise Exception("Error parsing amount. Money amount must be an integer.")
return "%s" % ("{:,.9f}".format(amount/1000000000.))
| true | true |
1c384cb0654489f18fed0bfc27fa098d462fab70 | 2,131 | py | Python | core/tests.py | Wanderer2436/django_pharmacy | 2e12c41e30f2f2e2c0f3abdaded98a917420f5b8 | [
"MIT"
] | null | null | null | core/tests.py | Wanderer2436/django_pharmacy | 2e12c41e30f2f2e2c0f3abdaded98a917420f5b8 | [
"MIT"
] | 2 | 2022-03-31T14:34:44.000Z | 2022-03-31T14:35:17.000Z | core/tests.py | Wanderer2436/django_pharmacy | 2e12c41e30f2f2e2c0f3abdaded98a917420f5b8 | [
"MIT"
] | null | null | null | from django.test import TestCase, Client, RequestFactory
from django.urls import reverse
from core import models
class ProductSearchTestCase(TestCase):
    """Tests for the catalog views: full listing, search by name, product detail."""
    def setUp(self):
        # Two products in two distinct categories give the name filter something to exclude.
        self.client = Client()
        category1 = models.Category.objects.create(name='TestCat1')
        category2 = models.Category.objects.create(name='TestCat2')
        self.product1 = models.Product.objects.create(name='Test1', category=category1, price=10)
        self.product2 = models.Product.objects.create(name='Test2', category=category2, price=25)
    def testWithoutParams(self):
        """Without search parameters the catalog must list every product."""
        response = self.client.get(reverse('core:catalog'))
        self.assertSequenceEqual(
            list(response.context['products']),
            list(models.Product.objects.all()),
            'При поиске без параметров должны выводиться все продукты',
        )
    def testSearchByName(self):
        """Searching by name must return exactly the one matching product."""
        response = self.client.get(reverse('core:catalog'), data={'name': 'Test1'})
        self.assertEqual(1, response.context['products'].count())
        self.assertEqual(
            'Test1',
            response.context['object_list'].first().name,
        )
    def testProductDetail(self):
        """The product detail page for an existing product must respond with 200."""
        response = self.client.get('/catalog/product/1/')
        self.assertEqual(response.status_code, 200)
class PharmacySearchTestCase(TestCase):
    """Tests for the pharmacy list view."""
    def setUp(self):
        self.client = Client()
        self.pharmacy1 = models.Pharmacy.objects.create(name='Test1')
        self.pharmacy2 = models.Pharmacy.objects.create(name='Test2')
    def testWithoutParams(self):
        """Without search parameters the view must list every pharmacy."""
        response = self.client.get(reverse('core:pharmacy'))
        self.assertSequenceEqual(
            list(response.context['object_list']),
            list(models.Pharmacy.objects.all()),
            'При поиске без параметров должны выводиться все аптеки',
        )
    def testSearchByName(self):
        # NOTE(review): searching for 'Test1' but expecting 2 results suggests the
        # pharmacy view ignores the 'name' parameter — confirm this is intended.
        response = self.client.get(reverse('core:pharmacy'), data={'name': 'Test1'})
        self.assertEqual(2, response.context['object_list'].count())
        self.assertEqual(
            'Test1',
            response.context['object_list'].first().name,
        )
| 38.053571 | 97 | 0.650399 | from django.test import TestCase, Client, RequestFactory
from django.urls import reverse
from core import models
class ProductSearchTestCase(TestCase):
def setUp(self):
self.client = Client()
category1 = models.Category.objects.create(name='TestCat1')
category2 = models.Category.objects.create(name='TestCat2')
self.product1 = models.Product.objects.create(name='Test1', category=category1, price=10)
self.product2 = models.Product.objects.create(name='Test2', category=category2, price=25)
def testWithoutParams(self):
response = self.client.get(reverse('core:catalog'))
self.assertSequenceEqual(
list(response.context['products']),
list(models.Product.objects.all()),
'При поиске без параметров должны выводиться все продукты',
)
def testSearchByName(self):
response = self.client.get(reverse('core:catalog'), data={'name': 'Test1'})
self.assertEqual(1, response.context['products'].count())
self.assertEqual(
'Test1',
response.context['object_list'].first().name,
)
def testProductDetail(self):
response = self.client.get('/catalog/product/1/')
self.assertEqual(response.status_code, 200)
class PharmacySearchTestCase(TestCase):
def setUp(self):
self.client = Client()
self.pharmacy1 = models.Pharmacy.objects.create(name='Test1')
self.pharmacy2 = models.Pharmacy.objects.create(name='Test2')
def testWithoutParams(self):
response = self.client.get(reverse('core:pharmacy'))
self.assertSequenceEqual(
list(response.context['object_list']),
list(models.Pharmacy.objects.all()),
'При поиске без параметров должны выводиться все аптеки',
)
def testSearchByName(self):
response = self.client.get(reverse('core:pharmacy'), data={'name': 'Test1'})
self.assertEqual(2, response.context['object_list'].count())
self.assertEqual(
'Test1',
response.context['object_list'].first().name,
)
| true | true |
1c384ccc0e50dbc39e26da9575b6526332b32a5b | 1,454 | py | Python | jdcloud_sdk/services/antipro/apis/DescribeAttackSourceRequest.py | jdcloud-apigateway/jdcloud-sdk-python | 0886769bcf1fb92128a065ff0f4695be099571cc | [
"Apache-2.0"
] | null | null | null | jdcloud_sdk/services/antipro/apis/DescribeAttackSourceRequest.py | jdcloud-apigateway/jdcloud-sdk-python | 0886769bcf1fb92128a065ff0f4695be099571cc | [
"Apache-2.0"
] | null | null | null | jdcloud_sdk/services/antipro/apis/DescribeAttackSourceRequest.py | jdcloud-apigateway/jdcloud-sdk-python | 0886769bcf1fb92128a065ff0f4695be099571cc | [
"Apache-2.0"
] | null | null | null | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeAttackSourceRequest(JDCloudRequest):
    """
    Query the sources of a DDoS attack (describeAttackSource API).
    """
    def __init__(self, parameters, header=None, version="v1"):
        # GET /attacklog/{attackLogId}:describeAttackSource; the path placeholder
        # is filled from the accompanying DescribeAttackSourceParameters object.
        super(DescribeAttackSourceRequest, self).__init__(
            '/attacklog/{attackLogId}:describeAttackSource', 'GET', header, version)
        self.parameters = parameters
class DescribeAttackSourceParameters(object):
    """Parameter holder for DescribeAttackSourceRequest."""

    def __init__(self, attackLogId, ip):
        """
        :param attackLogId: attack log record id
        :param ip: public IP already protected by the DDoS protection package.
            Use the describeProtectedIpList API
            (http://docs.jdcloud.com/anti-ddos-protection-package/api/describeprotectediplist)
            to query the protected public IPs.
        """
        self.attackLogId = attackLogId
        self.ip = ip
| 33.045455 | 188 | 0.722146 |
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeAttackSourceRequest(JDCloudRequest):
def __init__(self, parameters, header=None, version="v1"):
super(DescribeAttackSourceRequest, self).__init__(
'/attacklog/{attackLogId}:describeAttackSource', 'GET', header, version)
self.parameters = parameters
class DescribeAttackSourceParameters(object):
def __init__(self, attackLogId, ip):
self.attackLogId = attackLogId
self.ip = ip
| true | true |
1c384d63920592db9836d8b889c140103e7fa797 | 1,867 | py | Python | pdf_render/helpers/dados_testes.py | mauriciolongato/costurApp | 7a700ef43e94023ee94c962873f9e2324e0f7530 | [
"MIT"
] | 1 | 2019-05-12T19:16:07.000Z | 2019-05-12T19:16:07.000Z | pdf_render/helpers/dados_testes.py | mauriciolongato/costurApp | 7a700ef43e94023ee94c962873f9e2324e0f7530 | [
"MIT"
] | null | null | null | pdf_render/helpers/dados_testes.py | mauriciolongato/costurApp | 7a700ef43e94023ee94c962873f9e2324e0f7530 | [
"MIT"
] | null | null | null | from helpers import jobs, functions
def informacao_caixa():
    """Return fixture data describing a production box ("caixa") for PDF rendering.

    The dict mirrors the structure the renderer expects: a ``header`` with
    client/order/reference info and a ``body`` listing sewing steps, each
    carrying a barcode number and the static URL of its generated image.
    """
    def _acao(ordem_execucao, nome_acao, cod_bar):
        # One sewing step; the barcode image is generated on demand by
        # jobs.create_barcode and exposed through a static-file URL.
        return {'maquina': 'Maquina de Costura',
                'ordem_execucao': ordem_execucao,
                'nome_acao': nome_acao,
                'tempo_medio': '1000',
                'cod_bar': cod_bar,
                'cod_bar_address': functions.path2url('static/{}'.format(jobs.create_barcode(cod_bar))),
                }

    return {'header': {'nome_cliente': 'Polo',
                       'nome_caixa': 1,
                       'pedido_op': 'REF: COD10000001',
                       'referencia_id': 1,
                       'nome_referencia': 'Camisa Polo com Bolso',
                       'nome_sequencia': 'Fazer Camisa Polo Com Bolso'
                       },
            'body': [_acao('1', 'Costura Bolso', 987654321),
                     _acao('2', 'Costura Tronco', 123456789),
                     _acao('1', 'Costura Bolso', 987654321),
                     ]
            }
| 47.871795 | 112 | 0.447242 | from helpers import jobs, functions
def informacao_caixa():
data = {'header': {'nome_cliente': 'Polo',
'nome_caixa': 1,
'pedido_op': 'REF: COD10000001',
'referencia_id': 1,
'nome_referencia': 'Camisa Polo com Bolso',
'nome_sequencia': 'Fazer Camisa Polo Com Bolso'
},
'body': [{'maquina': 'Maquina de Costura',
'ordem_execucao': '1',
'nome_acao': 'Costura Bolso',
'tempo_medio': '1000',
'cod_bar': 987654321,
'cod_bar_address': functions.path2url('static/{}'.format(jobs.create_barcode(987654321))),
},
{'maquina': 'Maquina de Costura',
'ordem_execucao': '2',
'nome_acao': 'Costura Tronco',
'tempo_medio': '1000',
'cod_bar': 123456789,
'cod_bar_address': functions.path2url('static/{}'.format(jobs.create_barcode(123456789))),
},
{'maquina': 'Maquina de Costura',
'ordem_execucao': '1',
'nome_acao': 'Costura Bolso',
'tempo_medio': '1000',
'cod_bar': 987654321,
'cod_bar_address': functions.path2url('static/{}'.format(jobs.create_barcode(987654321))),
}
]
}
return data
| true | true |
1c384d76c4cde6b1c2953f675082b314778a5507 | 966 | py | Python | pdb2pqr/tests/propka_test.py | ashermancinelli/apbs-pdb2pqr | 0b1bc0126331cf3f1e08667ccc70dae8eda5cd00 | [
"BSD-3-Clause"
] | null | null | null | pdb2pqr/tests/propka_test.py | ashermancinelli/apbs-pdb2pqr | 0b1bc0126331cf3f1e08667ccc70dae8eda5cd00 | [
"BSD-3-Clause"
] | null | null | null | pdb2pqr/tests/propka_test.py | ashermancinelli/apbs-pdb2pqr | 0b1bc0126331cf3f1e08667ccc70dae8eda5cd00 | [
"BSD-3-Clause"
] | null | null | null | """Tests for PROPKA functionality."""
import logging
from pathlib import Path
import pytest
import common
_LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize("input_pdb", ["1K1I", "1AFS", "1FAS", "5DV8", "5D8V"], ids=str)
def test_propka_apo(input_pdb, tmp_path):
    """PROPKA non-regression tests on proteins without ligands."""
    output_pqr = "{0}.pqr".format(Path(input_pdb).stem)
    common.run_pdb2pqr(
        args="--log-level=INFO --ff=AMBER --drop-water --titration-state-method=propka",
        input_pdb=input_pdb,
        output_pqr=output_pqr,
        tmp_path=tmp_path)
# @pytest.mark.parametrize("input_pdb", ["1K1I", "1FAS"], ids=str)
# def test_propka_apo(input_pdb, tmp_path):
# """PROPKA titration of proteins without ligands."""
# args = "--log-level=INFO --ff=AMBER --drop-water --titration-state-method=propka"
# output_pqr = Path(input_pdb).stem + ".pqr"
# run_pdb2pqr(args, input_pdb, output_pqr, tmp_path)
| 35.777778 | 88 | 0.693582 | import logging
from pathlib import Path
import pytest
import common
_LOGGER = logging.getLogger(__name__)
@pytest.mark.parametrize("input_pdb", ["1K1I", "1AFS", "1FAS", "5DV8", "5D8V"], ids=str)
def test_propka_apo(input_pdb, tmp_path):
args = "--log-level=INFO --ff=AMBER --drop-water --titration-state-method=propka"
output_pqr = Path(input_pdb).stem + ".pqr"
common.run_pdb2pqr(args=args, input_pdb=input_pdb, output_pqr=output_pqr,
tmp_path=tmp_path)
| true | true |
1c384db9b4b4396fd411433687e067b395fac623 | 1,681 | py | Python | pakaman/config/targets.py | mikonse/pakaman | 4b6ac2ec550bfd642d34dce1fc406f2bd230e8de | [
"MIT"
] | 1 | 2020-10-18T17:57:30.000Z | 2020-10-18T17:57:30.000Z | pakaman/config/targets.py | mikonse/pakaman | 4b6ac2ec550bfd642d34dce1fc406f2bd230e8de | [
"MIT"
] | null | null | null | pakaman/config/targets.py | mikonse/pakaman | 4b6ac2ec550bfd642d34dce1fc406f2bd230e8de | [
"MIT"
] | null | null | null | from abc import ABCMeta
from typing import List, Dict
import schema
TARGETS = {}
class TargetMeta(ABCMeta):
    """Metaclass that auto-registers every Target class in ``TARGETS``.

    Registration happens at class-creation time, keyed on the class-level
    ``name`` attribute (not the Python class name).  The abstract base
    itself is registered under "unknown".
    """
    def __init__(cls, name, bases, classdict):
        super().__init__(name, bases, classdict)
        TARGETS[cls.name] = cls
class Target(metaclass=TargetMeta):
    """Base packaging target.

    Subclasses declare a ``name`` and a ``schema`` describing the extra
    configuration keys they accept.  Instances are built from raw config
    dicts via :meth:`from_config`, which dispatches on the ``name`` key.
    """
    name = "unknown"
    schema = schema.Schema({"name": str, schema.Optional("dependencies"): [str]}, ignore_extra_keys=True)
    def __init__(self, dependencies: List[str]):
        # Package names this target depends on.
        self.dependencies = dependencies
    @classmethod
    def from_config(cls, config: Dict) -> "Target":
        """Build the concrete Target subclass described by *config*.

        Raises:
            KeyError: if ``config["name"]`` is not a registered target.
        """
        data = cls.schema.validate(config)
        if data["name"] not in TARGETS:
            raise KeyError(f"unknown target {data['name']} given in config file")
        data["dependencies"] = data.get("dependencies", [])
        target = TARGETS[data.pop("name")]
        # Re-validate the raw config against the subclass schema to pick up
        # its extra keys.
        # NOTE(review): assumes ignore_extra_keys drops unknown keys from the
        # dict returned by validate(), so **data and **target_data never
        # overlap — confirm against the installed `schema` library version.
        target_data = target.schema.validate(config)
        return target(**data, **target_data)
class Debian(Target):
    """Debian packaging target."""
    name = "debian"
    # compat_version defaults to debhelper level 11 when omitted from config.
    schema = schema.Schema(
        {"dist_version": str, schema.Optional("compat_version", default=11): int, "architecture": str},
        ignore_extra_keys=True)
    def __init__(self, dependencies: List[str], dist_version: str, compat_version: int, architecture: str):
        super().__init__(dependencies)
        # Distribution version string, debhelper compat level, and target
        # architecture string as given in the config file.
        self.dist_version = dist_version
        self.compat_version = compat_version
        self.architecture = architecture
class Arch(Target):
    """Arch Linux packaging target."""
    name = "arch"
    schema = schema.Schema({"architecture": str}, ignore_extra_keys=True)
    def __init__(self, dependencies: List[str], architecture: str):
        super().__init__(dependencies)
        # Target architecture string as given in the config file.
        self.architecture = architecture
| 30.017857 | 107 | 0.66627 | from abc import ABCMeta
from typing import List, Dict
import schema
TARGETS = {}
class TargetMeta(ABCMeta):
def __init__(cls, name, bases, classdict):
super().__init__(name, bases, classdict)
TARGETS[cls.name] = cls
class Target(metaclass=TargetMeta):
name = "unknown"
schema = schema.Schema({"name": str, schema.Optional("dependencies"): [str]}, ignore_extra_keys=True)
def __init__(self, dependencies: List[str]):
self.dependencies = dependencies
@classmethod
def from_config(cls, config: Dict) -> "Target":
data = cls.schema.validate(config)
if data["name"] not in TARGETS:
raise KeyError(f"unknown target {data['name']} given in config file")
data["dependencies"] = data.get("dependencies", [])
target = TARGETS[data.pop("name")]
target_data = target.schema.validate(config)
return target(**data, **target_data)
class Debian(Target):
name = "debian"
schema = schema.Schema(
{"dist_version": str, schema.Optional("compat_version", default=11): int, "architecture": str},
ignore_extra_keys=True)
def __init__(self, dependencies: List[str], dist_version: str, compat_version: int, architecture: str):
super().__init__(dependencies)
self.dist_version = dist_version
self.compat_version = compat_version
self.architecture = architecture
class Arch(Target):
name = "arch"
schema = schema.Schema({"architecture": str}, ignore_extra_keys=True)
def __init__(self, dependencies: List[str], architecture: str):
super().__init__(dependencies)
self.architecture = architecture
| true | true |
1c384df1fb49a37e50cba49829227f17caf3f362 | 1,404 | py | Python | AutoTriageBot/DataTypes.py | jaydave/AutoTriageBot | d87898ec11d6057797697ffa867ccb52770e65ec | [
"BSD-3-Clause"
] | 58 | 2017-09-20T16:50:25.000Z | 2021-05-15T10:22:53.000Z | AutoTriageBot/DataTypes.py | jaydave/AutoTriageBot | d87898ec11d6057797697ffa867ccb52770e65ec | [
"BSD-3-Clause"
] | 2 | 2017-09-21T17:04:10.000Z | 2017-09-25T23:29:20.000Z | AutoTriageBot/DataTypes.py | jaydave/AutoTriageBot | d87898ec11d6057797697ffa867ccb52770e65ec | [
"BSD-3-Clause"
] | 16 | 2017-09-21T00:48:37.000Z | 2021-01-02T20:22:39.000Z | """
Copyright (c) 2017, salesforce.com, inc.
All rights reserved.
Licensed under the BSD 3-Clause license.
For full license text, see LICENSE.txt file in the repo root or https://opensource.org/licenses/BSD-3-Clause
"""
from typing import NamedTuple, Mapping, Any
from datetime import datetime
class VulnTestInfo(NamedTuple):
    """Result of attempting to reproduce a reported vulnerability."""
    reproduced: bool
    info: Mapping[str, Any]
    message: str
    type: str


class ReportData(NamedTuple):
    """Lightweight view of a report, used for duplicate detection."""
    title: str
    body: str
    time: datetime
    state: str
    id: str
    weakness: str


class URLParts(NamedTuple):
    """A parsed and normalized URL."""
    domain: str
    path: str
    queries: Mapping[str, str]


class BountyInfo(NamedTuple):
    """Expected bounty statistics (mean and standard deviation)."""
    average: float
    std: float
| 42.545455 | 109 | 0.487179 |
from typing import NamedTuple, Mapping, Any
from datetime import datetime
VulnTestInfo = NamedTuple("VulnTestInfo", [('reproduced', bool),
('info', Mapping[str, Any]),
('message', str),
('type', str)])
ReportData = NamedTuple('ReportData', [('title', str),
('body', str),
('time', datetime),
('state', str),
('id', str),
('weakness', str)])
URLParts = NamedTuple("URLParts", [('domain', str),
('path', str),
('queries', Mapping[str, str])])
BountyInfo = NamedTuple('BountyInfo', [('average', float),
('std', float)])
| true | true |
1c384df681c217a87980cd5a3d393f24b508e6e5 | 884 | py | Python | annotate_updated.py | VictoriaSugrue/sheepclock | b0eaec0b96afcc35f0d60982eb3d1215ea329d64 | [
"MIT"
] | null | null | null | annotate_updated.py | VictoriaSugrue/sheepclock | b0eaec0b96afcc35f0d60982eb3d1215ea329d64 | [
"MIT"
] | null | null | null | annotate_updated.py | VictoriaSugrue/sheepclock | b0eaec0b96afcc35f0d60982eb3d1215ea329d64 | [
"MIT"
] | null | null | null | import os
import glob
import numpy as np
import pandas as pd
filenames = [os.path.basename(x) for x in glob.glob("analysis_raw_results/*.bed")]
metadata = pd.read_table('human_factor_full_QC.txt')
for file in filenames:
temp_name = os.path.splitext(file)[0]
df_temp = pd.read_csv("analysis_raw_results/"+str(file),sep="\t",header=None)
df_temp.columns = ['chromCG','CGstart','CGend','CGid','filename','bed_col_1','bed_col_2','bed_col_3','bed_col_4','bed_col_5','bed_col_6','bed_col_7','bed_col_8','bed_col_9','bed_col_10']
df_temp["DCid"] = df_temp["filename"].str.replace('human_factor_split/human_factor_[0-9][0-9][0-9]/|_sort_peaks.narrowPeak.bed','').astype('int64')
df_output = df_temp.merge(metadata, on='DCid', how='left')
df_output = df_output.sort_values(by=['CGid'])
df_output.to_csv("analysis_results/" + str(temp_name) + ".csv",index=False)
| 49.111111 | 190 | 0.717195 | import os
import glob
import numpy as np
import pandas as pd
filenames = [os.path.basename(x) for x in glob.glob("analysis_raw_results/*.bed")]
metadata = pd.read_table('human_factor_full_QC.txt')
for file in filenames:
temp_name = os.path.splitext(file)[0]
df_temp = pd.read_csv("analysis_raw_results/"+str(file),sep="\t",header=None)
df_temp.columns = ['chromCG','CGstart','CGend','CGid','filename','bed_col_1','bed_col_2','bed_col_3','bed_col_4','bed_col_5','bed_col_6','bed_col_7','bed_col_8','bed_col_9','bed_col_10']
df_temp["DCid"] = df_temp["filename"].str.replace('human_factor_split/human_factor_[0-9][0-9][0-9]/|_sort_peaks.narrowPeak.bed','').astype('int64')
df_output = df_temp.merge(metadata, on='DCid', how='left')
df_output = df_output.sort_values(by=['CGid'])
df_output.to_csv("analysis_results/" + str(temp_name) + ".csv",index=False)
| true | true |
1c384e0d64e076b98463cdbac0355c84829ca88c | 1,706 | py | Python | pastey/config.py | WolfwithSword/pastey | 89b8c7a4da8a51a25270a2c9f3e6d7526c8211ad | [
"BSD-3-Clause"
] | 65 | 2021-04-13T19:10:03.000Z | 2022-02-03T01:36:04.000Z | pastey/config.py | WolfwithSword/pastey | 89b8c7a4da8a51a25270a2c9f3e6d7526c8211ad | [
"BSD-3-Clause"
] | 15 | 2021-04-14T11:57:43.000Z | 2022-03-22T02:47:15.000Z | pastey/config.py | WolfwithSword/pastey | 89b8c7a4da8a51a25270a2c9f3e6d7526c8211ad | [
"BSD-3-Clause"
] | 8 | 2021-04-15T19:18:00.000Z | 2022-02-03T03:02:27.000Z | # Data directory
data_directory = "./data"
# Listen address
listen_address = "0.0.0.0"
# Listen port
listen_port = 5000
# Use whitelisting
# Whitelisted IPs can view recent pastes on the home page, as well as delete pastes
# For limiting pasting to whitelisted users, enable the "restrict_pasting" option below
use_whitelist = True
# Whitelist CIDR
whitelist_cidr = ['127.0.0.1/32', '10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16']
# Blacklist CIDR
blacklist_cidr = []
# Restrict pasting functionality to whitelisted IPs
restrict_pasting = False
# Rate limit for pasting (ignored for whitelisted users)
rate_limit = "5/hour"
# Guess threshold for automatic language detection
guess_threshold = 0.20
# Number of recent pastes to show on the home page
recent_pastes = 10
# Try to use X-Real-IP or X-Forwarded-For HTTP headers
behind_proxy = False
# Default theme to display to users
default_theme = "Light"
# Purge interval (in seconds) for checking expired pastes
purge_interval = 3600
# Show recent pastes, even to non-whitelisted users (without a delete button)
force_show_recent = False
# Ignore these classifications for language detection
ignore_guess = ['TeX', 'SQL']
# Show CLI button on home page
show_cli_button = True
# Include https in the generated links instead of http
# This assumes you are behind something else doing the SSL
# termination, but want the users to see https links
#
# This is normally handled by the HTTP headers now
force_https_links = False
# This can be used to specify a different domain for generated links
#
# Note: exclude the http:// or https:// prefix, as well as anything
# following the domain (except the port, if applicable)
override_domain = "" | 27.516129 | 87 | 0.759086 |
data_directory = "./data"
listen_address = "0.0.0.0"
listen_port = 5000
use_whitelist = True
whitelist_cidr = ['127.0.0.1/32', '10.0.0.0/8', '172.16.0.0/12', '192.168.0.0/16']
blacklist_cidr = []
restrict_pasting = False
rate_limit = "5/hour"
guess_threshold = 0.20
recent_pastes = 10
behind_proxy = False
default_theme = "Light"
purge_interval = 3600
force_show_recent = False
ignore_guess = ['TeX', 'SQL']
show_cli_button = True
force_https_links = False
override_domain = "" | true | true |
1c384e41969475a7829dcd8b5e539e84df26a381 | 1,102 | py | Python | 25aapairs.py | 5ophia5/Homework | 7b756e5dd36bf5425a5ba67b7263a45ec2648510 | [
"MIT"
] | null | null | null | 25aapairs.py | 5ophia5/Homework | 7b756e5dd36bf5425a5ba67b7263a45ec2648510 | [
"MIT"
] | null | null | null | 25aapairs.py | 5ophia5/Homework | 7b756e5dd36bf5425a5ba67b7263a45ec2648510 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Print out all the unique pairwise amino acid combinations
# AC is the same as CA
# Skip AA, CC etc.
# Also print out how many combinations there are
aa = 'ACDEFGHIKLMNPQRSTVWY'
# Because the alphabet string is sorted, keeping only second > first yields
# each unordered pair exactly once, in the same order as a nested scan.
pairs = [(first, second) for first in aa for second in aa if second > first]
for first, second in pairs:
    print(first, second)
combo = len(pairs)
print(combo)
"""
python3 25aapairs.py
A C
A D
A E
A F
A G
A H
A I
A K
A L
A M
A N
A P
A Q
A R
A S
A T
A V
A W
A Y
C D
C E
C F
C G
C H
C I
C K
C L
C M
C N
C P
C Q
C R
C S
C T
C V
C W
C Y
D E
D F
D G
D H
D I
D K
D L
D M
D N
D P
D Q
D R
D S
D T
D V
D W
D Y
E F
E G
E H
E I
E K
E L
E M
E N
E P
E Q
E R
E S
E T
E V
E W
E Y
F G
F H
F I
F K
F L
F M
F N
F P
F Q
F R
F S
F T
F V
F W
F Y
G H
G I
G K
G L
G M
G N
G P
G Q
G R
G S
G T
G V
G W
G Y
H I
H K
H L
H M
H N
H P
H Q
H R
H S
H T
H V
H W
H Y
I K
I L
I M
I N
I P
I Q
I R
I S
I T
I V
I W
I Y
K L
K M
K N
K P
K Q
K R
K S
K T
K V
K W
K Y
L M
L N
L P
L Q
L R
L S
L T
L V
L W
L Y
M N
M P
M Q
M R
M S
M T
M V
M W
M Y
N P
N Q
N R
N S
N T
N V
N W
N Y
P Q
P R
P S
P T
P V
P W
P Y
Q R
Q S
Q T
Q V
Q W
Q Y
R S
R T
R V
R W
R Y
S T
S V
S W
S Y
T V
T W
T Y
V W
V Y
W Y
190
"""
| 5.222749 | 59 | 0.557169 |
aa = 'ACDEFGHIKLMNPQRSTVWY'
combo = 0
for aa1 in aa:
for aa2 in aa:
if aa2 > aa1:
print (aa1, aa2)
combo +=1
print(combo)
| true | true |
1c384e66487612e80be41bfcb84c82317974ff4d | 3,353 | py | Python | db/migrations/replaceableObjects.py | DeschutesBrewery/brewerypi | 5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f | [
"MIT"
] | 27 | 2017-11-27T05:01:05.000Z | 2020-11-14T19:52:26.000Z | db/migrations/replaceableObjects.py | DeschutesBrewery/brewerypi | 5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f | [
"MIT"
] | 259 | 2017-11-23T00:43:26.000Z | 2020-11-03T01:07:30.000Z | db/migrations/replaceableObjects.py | DeschutesBrewery/brewerypi | 5459dfc6b1ed415920c13a8a7c9a2d3d3c82099f | [
"MIT"
] | 8 | 2018-10-29T04:39:29.000Z | 2020-10-01T22:18:12.000Z | from alembic.operations import Operations, MigrateOperation
# Concepts from: http://alembic.zzzcomputing.com/en/latest/cookbook.html#replaceable-objects.
class ReplaceableObject(object):
    """Plain value holder describing a SQL procedure or function.

    Stores the raw pieces (name, parameter list, return clause, body text)
    that the create/drop operation implementations format into DDL.
    """
    def __init__(self, name, parameters, returns, sqlText):
        self.name, self.parameters = name, parameters
        self.returns, self.sqlText = returns, sqlText
class ReversibleOperation(MigrateOperation):
    """Base for migration operations that know how to invert themselves.

    Implements the "replaceable objects" recipe from the Alembic cookbook:
    each operation wraps a target object and can produce its reverse, so
    upgrade and downgrade paths stay symmetric.
    """
    def __init__(self, target):
        # The ReplaceableObject this operation creates or drops.
        self.target = target
    @classmethod
    def invokeForTarget(cls, operations, target):
        """Entry point bound via Operations.register_operation."""
        op = cls(target)
        return operations.invoke(op)
    def reverse(self):
        """Return the inverse operation; subclasses must implement."""
        raise NotImplementedError()
    @classmethod
    def _get_object_from_version(cls, operations, ident):
        """Resolve "revision.objname" to the object defined at module level
        in that migration script."""
        version, objname = ident.split(".")
        module = operations.get_context().script.get_revision(version).module
        obj = getattr(module, objname)
        return obj
    @classmethod
    def replace(cls, operations, target, replaces = None, replaceWith = None):
        """Drop the previous version of an object and create the new one.

        Exactly one of *replaces* (used on upgrade) or *replaceWith* (used
        on downgrade) must reference the other version as "revision.objname".
        """
        if replaces:
            oldObject = cls._get_object_from_version(operations, replaces)
            dropOld = cls(oldObject).reverse()
            createNew = cls(target)
        elif replaceWith:
            oldObject = cls._get_object_from_version(operations, replaceWith)
            dropOld = cls(target).reverse()
            createNew = cls(oldObject)
        else:
            raise TypeError("replaces or replaceWith is required")
        # Order matters: the old object must be gone before the new one is
        # created under the same name.
        operations.invoke(dropOld)
        operations.invoke(createNew)
@Operations.register_operation("createStoredProcedure", "invokeForTarget")
@Operations.register_operation("replaceStoredProcedure", "replace")
class CreateStoredProcedureOperation(ReversibleOperation):
    """Exposed as op.createStoredProcedure / op.replaceStoredProcedure."""
    def reverse(self):
        return DropStoredProcedureOperation(self.target)
@Operations.register_operation("dropStoredProcedure", "invokeForTarget")
class DropStoredProcedureOperation(ReversibleOperation):
    """Exposed as op.dropStoredProcedure."""
    def reverse(self):
        return CreateStoredProcedureOperation(self.target)
@Operations.implementation_for(CreateStoredProcedureOperation)
def createStoredProcedure(operations, operation):
    # DDL is assembled from migration-script-defined objects, not user
    # input, so plain %-formatting is acceptable here.
    operations.execute("CREATE PROCEDURE %s(%s) %s" % (operation.target.name, operation.target.parameters, operation.target.sqlText))
@Operations.implementation_for(DropStoredProcedureOperation)
def dropStoredProcedure(operations, operation):
    # Drops by name only; parameters/body are irrelevant for DROP.
    operations.execute("DROP PROCEDURE %s" % operation.target.name)
@Operations.register_operation("createFunction", "invokeForTarget")
@Operations.register_operation("replaceFunction", "replace")
class CreateFunctionOperation(ReversibleOperation):
    """Exposed as op.createFunction / op.replaceFunction."""
    def reverse(self):
        return DropFunctionOperation(self.target)
@Operations.register_operation("dropFunction", "invokeForTarget")
class DropFunctionOperation(ReversibleOperation):
    """Exposed as op.dropFunction."""
    def reverse(self):
        return CreateFunctionOperation(self.target)
@Operations.implementation_for(CreateFunctionOperation)
def createFunction(operations, operation):
    # Unlike procedures, functions require an explicit RETURNS clause.
    operations.execute("CREATE FUNCTION %s(%s) RETURNS %s %s" % (operation.target.name, operation.target.parameters, operation.target.returns,
                                                                 operation.target.sqlText))
@Operations.implementation_for(DropFunctionOperation)
def dropFunction(operations, operation):
    # Drops by name only; signature and body are irrelevant for DROP.
    operations.execute("DROP FUNCTION %s" % operation.target.name)
| 39.447059 | 142 | 0.742917 | from alembic.operations import Operations, MigrateOperation
ject(object):
def __init__(self, name, parameters, returns, sqlText):
self.name = name
self.parameters = parameters
self.returns = returns
self.sqlText = sqlText
class ReversibleOperation(MigrateOperation):
def __init__(self, target):
self.target = target
@classmethod
def invokeForTarget(cls, operations, target):
op = cls(target)
return operations.invoke(op)
def reverse(self):
raise NotImplementedError()
@classmethod
def _get_object_from_version(cls, operations, ident):
version, objname = ident.split(".")
module = operations.get_context().script.get_revision(version).module
obj = getattr(module, objname)
return obj
@classmethod
def replace(cls, operations, target, replaces = None, replaceWith = None):
if replaces:
oldObject = cls._get_object_from_version(operations, replaces)
dropOld = cls(oldObject).reverse()
createNew = cls(target)
elif replaceWith:
oldObject = cls._get_object_from_version(operations, replaceWith)
dropOld = cls(target).reverse()
createNew = cls(oldObject)
else:
raise TypeError("replaces or replaceWith is required")
operations.invoke(dropOld)
operations.invoke(createNew)
@Operations.register_operation("createStoredProcedure", "invokeForTarget")
@Operations.register_operation("replaceStoredProcedure", "replace")
class CreateStoredProcedureOperation(ReversibleOperation):
def reverse(self):
return DropStoredProcedureOperation(self.target)
@Operations.register_operation("dropStoredProcedure", "invokeForTarget")
class DropStoredProcedureOperation(ReversibleOperation):
def reverse(self):
return CreateStoredProcedureOperation(self.target)
@Operations.implementation_for(CreateStoredProcedureOperation)
def createStoredProcedure(operations, operation):
operations.execute("CREATE PROCEDURE %s(%s) %s" % (operation.target.name, operation.target.parameters, operation.target.sqlText))
@Operations.implementation_for(DropStoredProcedureOperation)
def dropStoredProcedure(operations, operation):
operations.execute("DROP PROCEDURE %s" % operation.target.name)
@Operations.register_operation("createFunction", "invokeForTarget")
@Operations.register_operation("replaceFunction", "replace")
class CreateFunctionOperation(ReversibleOperation):
def reverse(self):
return DropFunctionOperation(self.target)
@Operations.register_operation("dropFunction", "invokeForTarget")
class DropFunctionOperation(ReversibleOperation):
def reverse(self):
return CreateFunctionOperation(self.target)
@Operations.implementation_for(CreateFunctionOperation)
def createFunction(operations, operation):
operations.execute("CREATE FUNCTION %s(%s) RETURNS %s %s" % (operation.target.name, operation.target.parameters, operation.target.returns,
operation.target.sqlText))
@Operations.implementation_for(DropFunctionOperation)
def dropFunction(operations, operation):
operations.execute("DROP FUNCTION %s" % operation.target.name)
| true | true |
1c384ed35a686e610c02507262b0e05c5705abb5 | 599 | py | Python | Curso_de_Python/Mundo_02/Aula_14/Exercicios/ex058.py | tarcisioribeiro/Python | 69b8fee218a6c9b93ae7a6ea36eb903d5ac36955 | [
"MIT"
] | null | null | null | Curso_de_Python/Mundo_02/Aula_14/Exercicios/ex058.py | tarcisioribeiro/Python | 69b8fee218a6c9b93ae7a6ea36eb903d5ac36955 | [
"MIT"
] | null | null | null | Curso_de_Python/Mundo_02/Aula_14/Exercicios/ex058.py | tarcisioribeiro/Python | 69b8fee218a6c9b93ae7a6ea36eb903d5ac36955 | [
"MIT"
] | null | null | null | from random import randint
print()
print('Tente adivinhar o número que estou pensando!')
print()
# Number the player must guess, in [0, 10].
npc = randint(0, 10)
# Sentinel: -1 can never equal npc, so the loop always runs at least once.
nuser = -1
# Counts wrong guesses only (see NOTE before the final print).
palpite = 0
while (npc != nuser):
    nuser = int(input('Qual o número que eu pensei? '))
    print()
    if(nuser == npc):
        print('Parabéns! Você acertou!')
        print()
    elif(nuser < npc):
        # Guess too low: hint "higher" and count the miss.
        print('Mais! Tente novamente.')
        print()
        palpite += 1
    elif(nuser > npc):
        # Guess too high: hint "lower" and count the miss.
        print('Menos! Tente novamente.')
        print()
        palpite += 1
# NOTE(review): palpite is incremented only on wrong guesses, so the final
# message reports misses, excluding the winning attempt.
print('Foram necessárias {} tentativas para acertar o número.'.format(palpite))
print()
| 24.958333 | 79 | 0.597663 | from random import randint
print()
print('Tente adivinhar o número que estou pensando!')
print()
npc = randint(0, 10)
nuser = -1
palpite = 0
while (npc != nuser):
nuser = int(input('Qual o número que eu pensei? '))
print()
if(nuser == npc):
print('Parabéns! Você acertou!')
print()
elif(nuser < npc):
print('Mais! Tente novamente.')
print()
palpite += 1
elif(nuser > npc):
print('Menos! Tente novamente.')
print()
palpite += 1
print('Foram necessárias {} tentativas para acertar o número.'.format(palpite))
print()
| true | true |
1c384f35d72b0cdb3ecdc5e040b85c709490a00d | 3,271 | py | Python | python/pyarrow/tests/test_misc.py | tanyaschlusser/arrow | 77c6cd3237a2b305cf9c086cbf8d0a49be016701 | [
"Apache-2.0",
"CC0-1.0"
] | 1 | 2021-01-28T17:30:06.000Z | 2021-01-28T17:30:06.000Z | python/pyarrow/tests/test_misc.py | ivanyu/arrow | 5d1934fc3f5c65f70a3966b71c68941b2fd8d362 | [
"Apache-2.0",
"CC0-1.0"
] | null | null | null | python/pyarrow/tests/test_misc.py | ivanyu/arrow | 5d1934fc3f5c65f70a3966b71c68941b2fd8d362 | [
"Apache-2.0",
"CC0-1.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import pytest
import pyarrow as pa
def test_get_include():
    """pa.get_include() must point at a directory containing arrow/api.h."""
    include_dir = pa.get_include()
    assert os.path.exists(os.path.join(include_dir, 'arrow', 'api.h'))
@pytest.mark.skipif('sys.platform != "win32"')
def test_get_library_dirs_win32():
    """On Windows, the last entry of get_library_dirs() holds arrow.lib."""
    library_dirs = pa.get_library_dirs()
    library_lib = library_dirs[-1]
    assert os.path.exists(os.path.join(library_lib, 'arrow.lib'))
def test_cpu_count():
    """set_cpu_count/cpu_count round-trip; always restores the old value."""
    n = pa.cpu_count()
    assert n > 0
    try:
        pa.set_cpu_count(n + 5)
        assert pa.cpu_count() == n + 5
    finally:
        # Restore so later tests see the original thread-pool size.
        pa.set_cpu_count(n)
@pytest.mark.parametrize('klass', [
pa.Field,
pa.Schema,
pa.Column,
pa.ChunkedArray,
pa.RecordBatch,
pa.Table,
pa.Buffer,
pa.Array,
pa.Tensor,
pa.lib.DataType,
pa.lib.ListType,
pa.lib.UnionType,
pa.lib.StructType,
pa.lib.Time32Type,
pa.lib.Time64Type,
pa.lib.TimestampType,
pa.lib.Decimal128Type,
pa.lib.DictionaryType,
pa.lib.FixedSizeBinaryType,
pa.NullArray,
pa.NumericArray,
pa.IntegerArray,
pa.FloatingPointArray,
pa.BooleanArray,
pa.Int8Array,
pa.Int16Array,
pa.Int32Array,
pa.Int64Array,
pa.UInt8Array,
pa.UInt16Array,
pa.UInt32Array,
pa.UInt64Array,
pa.ListArray,
pa.UnionArray,
pa.BinaryArray,
pa.StringArray,
pa.FixedSizeBinaryArray,
pa.DictionaryArray,
pa.Date32Array,
pa.Date64Array,
pa.TimestampArray,
pa.Time32Array,
pa.Time64Array,
pa.Decimal128Array,
pa.StructArray,
pa.ArrayValue,
pa.BooleanValue,
pa.Int8Value,
pa.Int16Value,
pa.Int32Value,
pa.Int64Value,
pa.UInt8Value,
pa.UInt16Value,
pa.UInt32Value,
pa.UInt64Value,
pa.HalfFloatValue,
pa.FloatValue,
pa.DoubleValue,
pa.DecimalValue,
pa.Date32Value,
pa.Date64Value,
pa.Time32Value,
pa.Time64Value,
pa.TimestampValue,
pa.StringValue,
pa.BinaryValue,
pa.FixedSizeBinaryValue,
pa.ListValue,
pa.UnionValue,
pa.StructValue,
pa.DictionaryValue,
pa.ipc.Message,
pa.ipc.MessageReader,
pa.MemoryPool,
pa.LoggingMemoryPool,
pa.ProxyMemoryPool,
])
def test_extension_type_constructor_errors(klass):
    """Calling any extension class constructor directly must raise."""
    # ARROW-2638: prevent calling extension class constructors directly
    msg = "Do not call {cls}'s constructor directly, use .* instead."
    with pytest.raises(TypeError, match=msg.format(cls=klass.__name__)):
        klass()
| 25.161538 | 72 | 0.690614 |
import os
import pytest
import pyarrow as pa
def test_get_include():
include_dir = pa.get_include()
assert os.path.exists(os.path.join(include_dir, 'arrow', 'api.h'))
@pytest.mark.skipif('sys.platform != "win32"')
def test_get_library_dirs_win32():
library_dirs = pa.get_library_dirs()
library_lib = library_dirs[-1]
assert os.path.exists(os.path.join(library_lib, 'arrow.lib'))
def test_cpu_count():
n = pa.cpu_count()
assert n > 0
try:
pa.set_cpu_count(n + 5)
assert pa.cpu_count() == n + 5
finally:
pa.set_cpu_count(n)
@pytest.mark.parametrize('klass', [
pa.Field,
pa.Schema,
pa.Column,
pa.ChunkedArray,
pa.RecordBatch,
pa.Table,
pa.Buffer,
pa.Array,
pa.Tensor,
pa.lib.DataType,
pa.lib.ListType,
pa.lib.UnionType,
pa.lib.StructType,
pa.lib.Time32Type,
pa.lib.Time64Type,
pa.lib.TimestampType,
pa.lib.Decimal128Type,
pa.lib.DictionaryType,
pa.lib.FixedSizeBinaryType,
pa.NullArray,
pa.NumericArray,
pa.IntegerArray,
pa.FloatingPointArray,
pa.BooleanArray,
pa.Int8Array,
pa.Int16Array,
pa.Int32Array,
pa.Int64Array,
pa.UInt8Array,
pa.UInt16Array,
pa.UInt32Array,
pa.UInt64Array,
pa.ListArray,
pa.UnionArray,
pa.BinaryArray,
pa.StringArray,
pa.FixedSizeBinaryArray,
pa.DictionaryArray,
pa.Date32Array,
pa.Date64Array,
pa.TimestampArray,
pa.Time32Array,
pa.Time64Array,
pa.Decimal128Array,
pa.StructArray,
pa.ArrayValue,
pa.BooleanValue,
pa.Int8Value,
pa.Int16Value,
pa.Int32Value,
pa.Int64Value,
pa.UInt8Value,
pa.UInt16Value,
pa.UInt32Value,
pa.UInt64Value,
pa.HalfFloatValue,
pa.FloatValue,
pa.DoubleValue,
pa.DecimalValue,
pa.Date32Value,
pa.Date64Value,
pa.Time32Value,
pa.Time64Value,
pa.TimestampValue,
pa.StringValue,
pa.BinaryValue,
pa.FixedSizeBinaryValue,
pa.ListValue,
pa.UnionValue,
pa.StructValue,
pa.DictionaryValue,
pa.ipc.Message,
pa.ipc.MessageReader,
pa.MemoryPool,
pa.LoggingMemoryPool,
pa.ProxyMemoryPool,
])
def test_extension_type_constructor_errors(klass):
msg = "Do not call {cls}'s constructor directly, use .* instead."
with pytest.raises(TypeError, match=msg.format(cls=klass.__name__)):
klass()
| true | true |
1c3851b3d43779cdb1f4c93db026666f01001b41 | 111 | py | Python | tiingo_prices/__init__.py | robren/tiingo_prices | 3e7f45728b0c5a4a4e60f552b2fda97aa8340a20 | [
"MIT"
] | null | null | null | tiingo_prices/__init__.py | robren/tiingo_prices | 3e7f45728b0c5a4a4e60f552b2fda97aa8340a20 | [
"MIT"
] | 1 | 2020-08-11T18:16:20.000Z | 2020-08-11T18:16:20.000Z | tiingo_prices/__init__.py | robren/tiingo_prices | 3e7f45728b0c5a4a4e60f552b2fda97aa8340a20 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Package metadata.
__author__ = """Robert Rennison"""
__email__ = 'rob@robren.net'
__version__ = '0.3.1'
| 18.5 | 34 | 0.621622 |
__author__ = """Robert Rennison"""
__email__ = 'rob@robren.net'
__version__ = '0.3.1'
| true | true |
1c38532383da6d8d572acc687d189e7ed6fd9a0d | 3,795 | py | Python | snake/base/map.py | AuthurExcalbern/AI-Snake | 5dee76fcecd40add58a432ca8b88880b00ac45b9 | [
"MIT"
] | null | null | null | snake/base/map.py | AuthurExcalbern/AI-Snake | 5dee76fcecd40add58a432ca8b88880b00ac45b9 | [
"MIT"
] | 1 | 2018-03-24T08:50:32.000Z | 2018-03-28T11:11:50.000Z | snake/base/map.py | AuthurExcalbern/AI-Snake | 5dee76fcecd40add58a432ca8b88880b00ac45b9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# pylint: disable=C0103,C0111,W0201,W0212
"""Definition of class Map."""
import random
from snake.base.point import Point, PointType
from snake.base.pos import Pos
class Map:
    """2D game map: a grid of Points surrounded by a one-cell wall."""
    def __init__(self, num_rows, num_cols):
        """Initialize a Map object.

        Args:
            num_rows (int): Total rows, including the two wall rows.
            num_cols (int): Total columns, including the two wall columns.

        Raises:
            TypeError: If either dimension is not an int.
            ValueError: If either dimension is smaller than 5.
        """
        if not isinstance(num_rows, int) or not isinstance(num_cols, int):
            raise TypeError("\'num_rows\' and \'num_cols\' must be integers")
        if num_rows < 5 or num_cols < 5:
            raise ValueError("\'num_rows\' and \'num_cols\' must >= 5")
        self._num_rows = num_rows
        self._num_cols = num_cols
        self._capacity = (num_rows - 2) * (num_cols - 2)  # playable cells: map area minus the walls
        self._content = [[Point() for _ in range(num_cols)] for _ in range(num_rows)]
        self.reset()
    def reset(self):
        """Clear the food marker and restore WALL/EMPTY point types."""
        self._food = None
        for i in range(self._num_rows):
            for j in range(self._num_cols):
                if i == 0 or i == self._num_rows - 1 or \
                    j == 0 or j == self._num_cols - 1:
                    self._content[i][j].type = PointType.WALL
                else:
                    self._content[i][j].type = PointType.EMPTY
    def copy(self):
        """Return a new Map with the same dimensions and point types.

        NOTE(review): cells of type FOOD are copied, but the copy's _food
        attribute stays None (reset() in __init__ clears it) — confirm
        callers do not rely on copy().food.
        """
        m_copy = Map(self._num_rows, self._num_cols)
        for i in range(self._num_rows):
            for j in range(self._num_cols):
                m_copy._content[i][j].type = self._content[i][j].type
        return m_copy
    @property
    def num_rows(self):
        # Total rows, walls included.
        return self._num_rows
    @property
    def num_cols(self):
        # Total columns, walls included.
        return self._num_cols
    @property
    def capacity(self):
        # Number of playable (non-wall) cells.
        return self._capacity
    @property
    def food(self):
        # Position of the current food, or None.
        return self._food
    def point(self, pos):
        """Return a point on the map.

        DO NOT directly modify the point type to PointType.FOOD and vice versa.
        Use {add|rm}_food() methods instead.

        Args:
            pos (base.pos.Pos): The position of the point to be fetched

        Returns:
            snake.point.Point: The point at the given position.
        """
        return self._content[pos.x][pos.y]
    def is_inside(self, pos):
        """Whether pos lies strictly inside the walls."""
        return pos.x > 0 and pos.x < self.num_rows - 1 \
            and pos.y > 0 and pos.y < self.num_cols - 1
    def is_empty(self, pos):
        """Whether pos is inside the walls and currently EMPTY."""
        return self.is_inside(pos) and self.point(pos).type == PointType.EMPTY
    def is_safe(self, pos):
        """Whether the snake may move onto pos (an EMPTY or FOOD cell)."""
        return self.is_inside(pos) and (self.point(pos).type == PointType.EMPTY or \
                                        self.point(pos).type == PointType.FOOD)
    def is_full(self):
        """Check if the map is filled with the snake's bodies."""
        for i in range(1, self.num_rows - 1):
            for j in range(1, self.num_cols - 1):
                t = self._content[i][j].type
                # NOTE(review): relies on PointType enum values ordering all
                # snake-body types at or above HEAD_L — confirm in point.py.
                if t.value < PointType.HEAD_L.value:
                    return False
        return True
    def has_food(self):
        """Whether a food position is currently recorded."""
        return self._food is not None
    def rm_food(self):
        """Remove the current food (if any) and mark its cell EMPTY."""
        if self.has_food():
            self.point(self._food).type = PointType.EMPTY
            self._food = None
    def create_food(self, pos):
        """Place food at pos and return that position."""
        self.point(pos).type = PointType.FOOD
        self._food = pos
        return self._food
    def create_rand_food(self):
        """Place food on a random EMPTY interior cell.

        Returns:
            The new food position, or None if food already exists or no
            empty cell is left.
        """
        empty_pos = []
        for i in range(1, self._num_rows - 1):
            for j in range(1, self._num_cols - 1):
                t = self._content[i][j].type
                if t == PointType.EMPTY:
                    empty_pos.append(Pos(i, j))
                elif t == PointType.FOOD:
                    return None  # Stop if food exists
        if empty_pos:
            return self.create_food(random.choice(empty_pos))
        else:
            return None
| 30.853659 | 85 | 0.560474 |
import random
from snake.base.point import Point, PointType
from snake.base.pos import Pos
class Map:
    """Game field: a grid of Point cells enclosed by a wall border."""

    def __init__(self, num_rows, num_cols):
        """Create a num_rows x num_cols map; both sizes must be ints >= 5."""
        if not isinstance(num_rows, int) or not isinstance(num_cols, int):
            raise TypeError("'num_rows' and 'num_cols' must be integers")
        if num_rows < 5 or num_cols < 5:
            raise ValueError("'num_rows' and 'num_cols' must >= 5")

        self._num_rows = num_rows
        self._num_cols = num_cols
        # Interior cells only -- the one-cell border is occupied by walls.
        self._capacity = (num_rows - 2) * (num_cols - 2)
        self._content = [[Point() for _ in range(num_cols)]
                         for _ in range(num_rows)]
        self.reset()

    def reset(self):
        """Clear the map: walls on the border, empty interior, no food."""
        self._food = None
        bottom = self._num_rows - 1
        right = self._num_cols - 1
        for row in range(self._num_rows):
            for col in range(self._num_cols):
                on_border = row == 0 or row == bottom or col == 0 or col == right
                self._content[row][col].type = \
                    PointType.WALL if on_border else PointType.EMPTY

    def copy(self):
        """Return a new Map whose cell types mirror this one."""
        duplicate = Map(self._num_rows, self._num_cols)
        for row in range(self._num_rows):
            for col in range(self._num_cols):
                duplicate._content[row][col].type = self._content[row][col].type
        return duplicate

    @property
    def num_rows(self):
        return self._num_rows

    @property
    def num_cols(self):
        return self._num_cols

    @property
    def capacity(self):
        return self._capacity

    @property
    def food(self):
        return self._food

    def point(self, pos):
        """Return the Point stored at the given position."""
        return self._content[pos.x][pos.y]

    def is_inside(self, pos):
        """Whether pos lies strictly inside the wall border."""
        return 0 < pos.x < self.num_rows - 1 and 0 < pos.y < self.num_cols - 1

    def is_empty(self, pos):
        """Whether pos is inside the map and holds an EMPTY cell."""
        return self.is_inside(pos) and self.point(pos).type == PointType.EMPTY

    def is_safe(self, pos):
        """Whether pos is inside the map and walkable (EMPTY or FOOD)."""
        if not self.is_inside(pos):
            return False
        cell_type = self.point(pos).type
        return cell_type == PointType.EMPTY or cell_type == PointType.FOOD

    def is_full(self):
        """Whether no interior cell has a type below HEAD_L (presumably:
        the snake covers the whole interior -- semantics depend on the
        PointType enum ordering)."""
        for row in range(1, self.num_rows - 1):
            for col in range(1, self.num_cols - 1):
                if self._content[row][col].type.value < PointType.HEAD_L.value:
                    return False
        return True

    def has_food(self):
        return self._food is not None

    def rm_food(self):
        """Remove the current food from the map, if any."""
        if self.has_food():
            self.point(self._food).type = PointType.EMPTY
            self._food = None

    def create_food(self, pos):
        """Place food at pos and remember its location."""
        self.point(pos).type = PointType.FOOD
        self._food = pos
        return self._food

    def create_rand_food(self):
        """Place food on a random empty interior cell.

        Returns None if food already exists or there is no empty cell.
        """
        candidates = []
        for row in range(1, self._num_rows - 1):
            for col in range(1, self._num_cols - 1):
                cell_type = self._content[row][col].type
                if cell_type == PointType.FOOD:
                    return None  # food already present; do not add another
                if cell_type == PointType.EMPTY:
                    candidates.append(Pos(row, col))
        if not candidates:
            return None
        return self.create_food(random.choice(candidates))
| true | true |
1c385327f6e8f54f165abb2360d61facae4ffb2f | 2,063 | py | Python | tasks/UDEMY/100_days/L003/day-3-1.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | 2 | 2022-01-19T18:01:35.000Z | 2022-02-06T06:54:38.000Z | tasks/UDEMY/100_days/L003/day-3-1.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | null | null | null | tasks/UDEMY/100_days/L003/day-3-1.py | AleksNeStu/projects | 1a4c68dfbdcb77228f0f3617e58fd18fcb1f5dbb | [
"Apache-2.0"
] | null | null | null | # 🚨 Don't change the code below 👇
# 🚨 Don't change the code below 👇
number = int(input("Which number do you want to check? "))
# 🚨 Don't change the code above 👆

# Parity check: a number is even when division by 2 leaves no remainder.
print("This is an even number." if number % 2 == 0
      else "This is an odd number.")

#First *fork* your copy. Then copy-paste your code below this line 👇
#Finally click "Run" to execute the tests

#SOLUTION
# #If the number can be divided by 2 with 0 remainder.
# if number % 2 == 0:
#     print("This is an even number.")
# #Otherwise (number cannot be divided by 2 with 0 remainder).
# else:
#     print("This is an odd number.")
#SOLUTION

#Write your code above this line 👆
# 🚨 Do NOT modify the code below this line 👇

# Self-grading harness: copy the first 40 lines of this very script into a
# generated module as the body of ``test_func`` so the student's answer can
# be re-run under unittest with patched stdin/stdout.
with open('testing_copy.py', 'w') as file:
    file.write('def test_func():\n')
    with open('day-3-1.py', 'r') as original:
        f2 = original.readlines()[0:40]
        for x in f2:
            # Prefix each copied line so it becomes part of the function body.
            file.write(" " + x)

import testing_copy
import unittest
from unittest.mock import patch
from io import StringIO
import os

class MyTest(unittest.TestCase):
    # Feed ``given_answer`` to input() and compare captured stdout with
    # ``expected_print`` (exact match, including the trailing newline).
    def run_test(self, given_answer, expected_print):
        with patch('builtins.input', return_value=given_answer), patch('sys.stdout', new=StringIO()) as fake_out:
            testing_copy.test_func()
            self.assertEqual(fake_out.getvalue(), expected_print)

    def test_1(self):
        self.run_test(given_answer='10', expected_print='This is an even number.\n')

    def test_2(self):
        self.run_test(given_answer='12', expected_print="This is an even number.\n")

    def test_3(self):
        self.run_test(given_answer='90', expected_print='This is an even number.\n')

    def test_4(self):
        self.run_test(given_answer='13', expected_print='This is an odd number.\n')

print("\n\n\n.\n.\n.")
print('Checking what your code prints for several different numbers.\nFor the number 8 it should print this *exactly*:\n')
print('This is an even number.')
print('\nRunning some tests on your code:')
print(".\n.\n.")
# exit=False keeps the script alive so the temp module can be cleaned up.
unittest.main(verbosity=1, exit=False)
os.remove("testing_copy.py")
| 21.05102 | 122 | 0.663112 |
number = int(input("Which number do you want to check? "))
# 🚨 Don't change the code above 👆
print("This is an even number." if number % 2 == 0
else "This is an odd number.")
t_func():\n')
with open('day-3-1.py', 'r') as original:
f2 = original.readlines()[0:40]
for x in f2:
file.write(" " + x)
import testing_copy
import unittest
from unittest.mock import patch
from io import StringIO
import os
class MyTest(unittest.TestCase):
def run_test(self, given_answer, expected_print):
with patch('builtins.input', return_value=given_answer), patch('sys.stdout', new=StringIO()) as fake_out:
testing_copy.test_func()
self.assertEqual(fake_out.getvalue(), expected_print)
def test_1(self):
self.run_test(given_answer='10', expected_print='This is an even number.\n')
def test_2(self):
self.run_test(given_answer='12', expected_print="This is an even number.\n")
def test_3(self):
self.run_test(given_answer='90', expected_print='This is an even number.\n')
def test_4(self):
self.run_test(given_answer='13', expected_print='This is an odd number.\n')
print("\n\n\n.\n.\n.")
print('Checking what your code prints for several different numbers.\nFor the number 8 it should print this *exactly*:\n')
print('This is an even number.')
print('\nRunning some tests on your code:')
print(".\n.\n.")
unittest.main(verbosity=1, exit=False)
os.remove("testing_copy.py")
| true | true |
1c3854aecee3c4aeeb666f82155b1d9948d1b537 | 13,148 | py | Python | maskrcnn_benchmark-dota/modeling/rpn/rfcos/smallerRF_wo_clsloss.py | RSIA-LIESMARS-WHU/AxisLearning | 5a108860c959a200811f9643d567ca7883c74875 | [
"BSD-2-Clause"
] | 3 | 2021-01-26T07:03:26.000Z | 2021-02-03T12:14:05.000Z | maskrcnn_benchmark/modeling/rpn/rfcos/smallerRF_wo_clsloss.py | RSIA-LIESMARS-WHU/AxisLearning | 5a108860c959a200811f9643d567ca7883c74875 | [
"BSD-2-Clause"
] | null | null | null | maskrcnn_benchmark/modeling/rpn/rfcos/smallerRF_wo_clsloss.py | RSIA-LIESMARS-WHU/AxisLearning | 5a108860c959a200811f9643d567ca7883c74875 | [
"BSD-2-Clause"
] | null | null | null | """
This file contains specific functions for computing losses of FCOS
file
"""
import torch
from torch.nn import functional as F
from torch import nn
from ..utils import concat_box_prediction_layers
from maskrcnn_benchmark.layers import IOULoss
from maskrcnn_benchmark.layers import SigmoidFocalLoss, smooth_l1_loss
from maskrcnn_benchmark.modeling.matcher import Matcher
from maskrcnn_benchmark.modeling.utils import cat
from maskrcnn_benchmark.structures.rboxlist_ops import targets_for_locations
from maskrcnn_benchmark.structures.boxlist_ops import cat_boxlist
import time
INF = 100000000
# def onehot(label, cls_num):
# onehot = np.zeros(self.__num_classes, dtype=np.float)
# onehot[bbox_class_ind] = 1.0
# uniform_distribution = np.full(self.__num_classes, 1.0 / self.__num_classes)
# deta = 0.01
# smooth_onehot = onehot * (1 - deta) + deta * uniform_distribution
class FCOSLossComputation(object):
    """
    Computes the FCOS losses (classification, axis-offset regression and
    centerness) for rotated-box targets.
    """

    def __init__(self, cfg):
        """
        Arguments:
            cfg: global config node; reads the MODEL.FCOS.* loss settings.
        """
        # Focal loss for classification.
        self.cls_loss_func = SigmoidFocalLoss(
            cfg.MODEL.FCOS.LOSS_GAMMA,
            cfg.MODEL.FCOS.LOSS_ALPHA
        )
        self.cfg = cfg
        # Regression uses smooth L1 on cube-root-compressed offsets
        # (see compute_targets_for_locations) instead of IoU loss.
        self.centerness_loss_func = nn.BCEWithLogitsLoss()
        self.num_pts = cfg.MODEL.FCOS.NUM_PTS

    def prepare_targets(self, points, targets):
        """
        Build per-FPN-level classification labels and regression targets.

        Arguments:
            points (list[Tensor]): per-level location coordinates.
            targets (list[BoxList]): per-image ground-truth boxes.

        Returns:
            labels_level_first (list[Tensor]): labels, one tensor per level.
            reg_targets_level_first (list[Tensor]): regression targets,
                one tensor per level.
        """
        method = self.cfg.MODEL.FCOS.SELECT_FEATURE_METHOD
        if method == "fcos":
            # FCOS-style disjoint regression ranges for P3-P7.
            object_sizes_of_interest = [
                [-1, 64],
                [64, 128],
                [128, 256],
                [256, 512],
                [512, INF],
            ]
        elif method == "foveabox":
            # FoveaBox-style overlapping scale ranges.
            object_sizes_of_interest = [
                [-1, 64],
                [32, 128],
                [64, 256],
                [128, 512],
                [256, INF],
            ]
        elif method == "all":
            # Every level may regress any object up to its upper bound.
            object_sizes_of_interest = [
                [-1, 64],
                [-1, 128],
                [-1, 256],
                [-1, 512],
                [-1, INF],
            ]
        else:
            # Previously an unknown value fell through to a NameError;
            # fail with an explicit message instead.
            raise ValueError(
                "unknown MODEL.FCOS.SELECT_FEATURE_METHOD: %r" % (method,))

        # Per-level factor used to normalize the regression offsets.
        normal_factor = [16, 32, 64, 128, 256]

        expanded_object_sizes_of_interest = []
        expanded_normal_factor = []
        for level, points_per_level in enumerate(points):
            size_range = points_per_level.new_tensor(object_sizes_of_interest[level])
            # (2,) -> (num_points_in_level, 2)
            expanded_object_sizes_of_interest.append(
                size_range[None].expand(len(points_per_level), -1)
            )
            factor = points_per_level.new_tensor(normal_factor[level])
            # scalar -> (num_points_in_level,)
            expanded_normal_factor.append(factor.expand(len(points_per_level)))
        expanded_object_sizes_of_interest = torch.cat(expanded_object_sizes_of_interest, dim=0)
        # BUGFIX: this line used to read ``torch,cat(...)`` (comma instead
        # of dot), which silently produced the tuple ``(torch, tensor)``
        # and forced the consumer to index ``normal_factor[1]``.
        expanded_normal_factor = torch.cat(expanded_normal_factor, dim=0)

        num_points_per_level = [len(points_per_level) for points_per_level in points]
        points_all_level = torch.cat(points, dim=0)
        labels, reg_targets = self.compute_targets_for_locations(
            points_all_level, targets, expanded_object_sizes_of_interest,
            expanded_normal_factor
        )

        # Split each image's targets back into per-level chunks.
        for i in range(len(labels)):
            labels[i] = torch.split(labels[i], num_points_per_level, dim=0)
            reg_targets[i] = torch.split(reg_targets[i], num_points_per_level, dim=0)

        # Regroup so the outer index is the FPN level, not the image.
        labels_level_first = []
        reg_targets_level_first = []
        for level in range(len(points)):
            labels_level_first.append(
                torch.cat([labels_per_im[level] for labels_per_im in labels], dim=0)
            )
            reg_targets_level_first.append(
                torch.cat([reg_targets_per_im[level] for reg_targets_per_im in reg_targets], dim=0)
            )

        return labels_level_first, reg_targets_level_first

    def compute_targets_for_locations(self, locations, targets,
                                      object_sizes_of_interest, normal_factor):
        """
        Assign a label and a regression target to every location.

        Arguments:
            locations (Tensor): (num_points, 2) coordinates, all levels
                concatenated.
            targets (list[BoxList]): per-image ground truth.
            object_sizes_of_interest (Tensor): (num_points, 2) min/max
                regression range per location.
            normal_factor (Tensor): (num_points,) per-location offset
                normalization factor.

        Returns:
            labels (list[Tensor]), reg_targets (list[Tensor]): one entry
            per image.
        """
        labels = []
        reg_targets = []
        for im_i in range(len(targets)):
            targets_per_im = targets[im_i]
            bboxes = targets_per_im.bbox
            labels_per_im = targets_per_im.get_field("labels")
            # Columns (assumed from usage below -- confirm against
            # targets_for_locations): 0 = matched gt index,
            # 1 = inside-box flag, 2:7 = (dx1, dy1, dx2, dy2, h) offsets.
            reg_targets_per_im = targets_for_locations(bboxes, locations)

            # Scale used for level assignment: the largest of the distances
            # to the two axis endpoints and the target height.
            dist_1 = torch.sqrt(torch.pow(reg_targets_per_im[:, 2], 2)
                                + torch.pow(reg_targets_per_im[:, 3], 2))
            dist_2 = torch.sqrt(torch.pow(reg_targets_per_im[:, 4], 2)
                                + torch.pow(reg_targets_per_im[:, 5], 2))
            target_h = reg_targets_per_im[:, 5]
            max_reg_targets_per_im = torch.stack(
                [dist_1, dist_2, target_h], dim=1).max(dim=1)[0]

            # Restrict each location to its level's regression range.
            is_cared_in_the_level = \
                (max_reg_targets_per_im >= object_sizes_of_interest[:, 0]) & \
                (max_reg_targets_per_im <= object_sizes_of_interest[:, 1])

            labels_per_im = labels_per_im[reg_targets_per_im[:, 0].long()]
            # Locations outside every box are background.
            labels_per_im[reg_targets_per_im[:, 1] < 0.5] = 0
            # Locations whose object is outside this level's range: background.
            labels_per_im[is_cared_in_the_level == 0] = 0
            # Inside a box but out of range: ignore (down-weighted in the loss).
            labels_per_im[(reg_targets_per_im[:, 1] > 0.5) * (is_cared_in_the_level == 0)] = -1

            # Signed cube-root compression of the normalized offsets.
            ones = torch.ones_like(reg_targets_per_im[:, 2:7])
            sign = torch.where(reg_targets_per_im[:, 2:7] >= 0, ones, -ones)
            reg_targets_per_im[:, 2:7] = sign * torch.pow(
                torch.abs(reg_targets_per_im[:, 2:7]) / normal_factor[:, None],
                1.0 / 3)

            labels.append(labels_per_im)
            reg_targets.append(reg_targets_per_im[:, 2:])
        return labels, reg_targets

    def compute_centerness_targets(self, reg_targets):
        """Centerness in the original FCOS (l, t, r, b) formulation.

        NOTE(review): unused by __call__ in this variant; kept for API
        compatibility.
        """
        left_right = reg_targets[:, [0, 2]]
        top_bottom = reg_targets[:, [1, 3]]
        centerness = (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * \
                     (top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0])
        return torch.sqrt(centerness)

    def __call__(self, locations, box_cls, box_regression, centerness, targets):
        """
        Arguments:
            locations (list[Tensor])
            box_cls (list[Tensor])
            box_regression (list[Tensor])
            centerness (list[Tensor])
            targets (list[BoxList])

        Returns:
            cls_loss (Tensor)
            reg_loss (Tensor)
            centerness_loss (Tensor)
        """
        N = box_cls[0].size(0)
        num_classes = box_cls[0].size(1)
        labels, reg_targets = self.prepare_targets(locations, targets)

        box_cls_flatten = []
        box_regression_flatten = []
        centerness_flatten = []
        labels_flatten = []
        reg_targets_flatten = []
        for l in range(len(labels)):
            # (N, C, H, W) -> (N*H*W, C)
            box_cls_flatten.append(box_cls[l].permute(0, 2, 3, 1).reshape(-1, num_classes))
            box_regression_flatten.append(box_regression[l].permute(0, 2, 3, 1).reshape(-1, 5))
            labels_flatten.append(labels[l].reshape(-1))
            # 5 offset targets + 1 centerness target per location.
            reg_targets_flatten.append(reg_targets[l].reshape(-1, 6))
            centerness_flatten.append(centerness[l].reshape(-1))
        box_cls_flatten = torch.cat(box_cls_flatten, dim=0)
        box_regression_flatten = torch.cat(box_regression_flatten, dim=0)
        centerness_flatten = torch.cat(centerness_flatten, dim=0)
        labels_flatten = torch.cat(labels_flatten, dim=0)
        reg_targets_flatten = torch.cat(reg_targets_flatten, dim=0)

        pos_inds = torch.nonzero(labels_flatten > 0).squeeze(1)
        ignore_inds = torch.nonzero(labels_flatten == -1).squeeze(1)

        # Classification weight: the centerness target where it is non-zero,
        # 1 elsewhere, and a small constant for ignored locations.
        all_centerness_targets = reg_targets_flatten[:, -1]
        cls_weight = torch.where(
            all_centerness_targets == 0,
            torch.ones_like(all_centerness_targets),
            all_centerness_targets).unsqueeze(-1)
        cls_weight[ignore_inds] = 0.05
        # Ignored locations are fed to the focal loss as background; their
        # contribution is down-weighted via cls_weight above.
        labels_flatten[labels_flatten == -1] = 0
        cls_loss = self.cls_loss_func(
            box_cls_flatten,
            labels_flatten.int(),
            weight=cls_weight
        ) / (pos_inds.numel() + N)  # add N to avoid dividing by zero

        box_regression_pos = box_regression_flatten[pos_inds]
        reg_targets_pos = reg_targets_flatten[pos_inds]
        if pos_inds.numel() > 0:
            centerness_targets_pos = reg_targets_pos[:, -1]
            # Offset regression only on positives, weighted by centerness.
            reg_loss = smooth_l1_loss(
                box_regression_pos,
                reg_targets_pos[:, :-1],
                weight=centerness_targets_pos.unsqueeze(-1)
            )
            # Centerness is supervised at every location.
            centerness_loss = self.centerness_loss_func(
                centerness_flatten,
                reg_targets_flatten[:, -1]
            )
        else:
            # No positives: fall back to sums so the returned tensors stay
            # connected to the graph with the right dtype/device.
            reg_loss = box_regression_flatten.sum()
            centerness_loss = centerness_flatten.sum()

        return cls_loss, reg_loss, centerness_loss
def make_fcos_loss_evaluator(cfg):
    """Factory: build the FCOS loss evaluator from the global config."""
    return FCOSLossComputation(cfg)
| 40.580247 | 189 | 0.599255 |
import torch
from torch.nn import functional as F
from torch import nn
from ..utils import concat_box_prediction_layers
from maskrcnn_benchmark.layers import IOULoss
from maskrcnn_benchmark.layers import SigmoidFocalLoss, smooth_l1_loss
from maskrcnn_benchmark.modeling.matcher import Matcher
from maskrcnn_benchmark.modeling.utils import cat
from maskrcnn_benchmark.structures.rboxlist_ops import targets_for_locations
from maskrcnn_benchmark.structures.boxlist_ops import cat_boxlist
import time
INF = 100000000
class FCOSLossComputation(object):
def __init__(self, cfg):
self.cls_loss_func = SigmoidFocalLoss(
cfg.MODEL.FCOS.LOSS_GAMMA,
cfg.MODEL.FCOS.LOSS_ALPHA
)
self.cfg = cfg
self.centerness_loss_func = nn.BCEWithLogitsLoss()
self.num_pts = cfg.MODEL.FCOS.NUM_PTS
def prepare_targets(self, points, targets):
if self.cfg.MODEL.FCOS.SELECT_FEATURE_METHOD == "fcos":
object_sizes_of_interest = [
[-1, 64],
[64, 128],
[128, 256],
[256, 512],
[512, INF],
]
elif self.cfg.MODEL.FCOS.SELECT_FEATURE_METHOD == "foveabox":
object_sizes_of_interest = [
[-1, 64],
[32, 128],
[64, 256],
[128, 512],
[256, INF],
]
elif self.cfg.MODEL.FCOS.SELECT_FEATURE_METHOD == "all":
object_sizes_of_interest = [
[-1, 64],
[-1, 128],
[-1, 256],
[-1, 512],
[-1, INF],
]
normal_factor = [16, 32, 64, 128, 256]
expanded_object_sizes_of_interest = []
expanded_normal_factor=[]
for l, points_per_level in enumerate(points):
object_sizes_of_interest_per_level = \
points_per_level.new_tensor(object_sizes_of_interest[l])
expanded_object_sizes_of_interest.append(
object_sizes_of_interest_per_level[None].expand(len(points_per_level), -1)
)
normal_factor_per_level = \
points_per_level.new_tensor(normal_factor[l])
expanded_normal_factor.append(
normal_factor_per_level.expand(len(points_per_level))
)
expanded_object_sizes_of_interest = torch.cat(expanded_object_sizes_of_interest, dim=0)
expanded_normal_factor = torch,cat(expanded_normal_factor, dim=0)
num_points_per_level = [len(points_per_level) for points_per_level in points]
points_all_level = torch.cat(points, dim=0)
labels, reg_targets = self.compute_targets_for_locations(
points_all_level, targets, expanded_object_sizes_of_interest, expanded_normal_factor
)
for i in range(len(labels)):
labels[i] = torch.split(labels[i], num_points_per_level, dim=0)
reg_targets[i] = torch.split(reg_targets[i], num_points_per_level, dim=0)
labels_level_first = []
reg_targets_level_first = []
for level in range(len(points)):
labels_level_first.append(
torch.cat([labels_per_im[level] for labels_per_im in labels], dim=0)
)
reg_targets_level_first.append(
torch.cat([reg_targets_per_im[level] for reg_targets_per_im in reg_targets], dim=0)
)
return labels_level_first, reg_targets_level_first
def compute_targets_for_locations(self, locations, targets, object_sizes_of_interest, normal_factor):
labels = []
reg_targets = []
for im_i in range(len(targets)):
targets_per_im = targets[im_i]
bboxes = targets_per_im.bbox
labels_per_im = targets_per_im.get_field("labels")
reg_targets_per_im = targets_for_locations(bboxes, locations)
max_reg_targets_per_im = torch.abs(reg_targets_per_im[:,2:6]).max(dim=1)[0]
dist_1 = torch.sqrt(torch.pow(reg_targets_per_im[:,2],2) + torch.pow(reg_targets_per_im[:,3],2))
dist_2 = torch.sqrt(torch.pow(reg_targets_per_im[:,4],2) + torch.pow(reg_targets_per_im[:,5],2))
target_h = reg_targets_per_im[:,5]
max_reg_targets_per_im = torch.stack([dist_1, dist_2, target_h], dim=1).max(dim=1)[0]
object_sizes_of_interest= object_sizes_of_interest
is_cared_in_the_level = \
(max_reg_targets_per_im >= object_sizes_of_interest[:, 0]) & \
(max_reg_targets_per_im <= object_sizes_of_interest[:, 1])
labels_per_im = labels_per_im[reg_targets_per_im[:, 0].long()]
labels_per_im[reg_targets_per_im[:, 1] < 0.5 ] = 0
labels_per_im[is_cared_in_the_level == 0] = 0
labels_per_im[(reg_targets_per_im[:, 1] > 0.5) * (is_cared_in_the_level == 0)] = -1
ones = torch.ones_like(reg_targets_per_im[:,2:7])
one_minusone = torch.where(reg_targets_per_im[:,2:7]>=0, ones, -ones)
reg_targets_per_im[:,2:7] = one_minusone*torch.pow(torch.abs(reg_targets_per_im[:,2:7])/normal_factor[1][:,None], 1/3) labels.append(labels_per_im)
reg_targets.append(reg_targets_per_im[:,2:])
return labels, reg_targets
def compute_centerness_targets(self, reg_targets):
left_right = reg_targets[:, [0, 2]]
top_bottom = reg_targets[:, [1, 3]]
centerness = (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * \
(top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0])
return torch.sqrt(centerness)
def __call__(self, locations, box_cls, box_regression, centerness, targets):
N = box_cls[0].size(0)
num_classes = box_cls[0].size(1)
labels, reg_targets = self.prepare_targets(locations, targets)
box_cls_flatten = []
box_regression_flatten = []
centerness_flatten = []
labels_flatten = []
reg_targets_flatten = []
for l in range(len(labels)):
box_cls_flatten.append(box_cls[l].permute(0, 2, 3, 1).reshape(-1, num_classes))
box_regression_flatten.append(box_regression[l].permute(0, 2, 3, 1).reshape(-1, 5))
labels_flatten.append(labels[l].reshape(-1))
reg_targets_flatten.append(reg_targets[l].reshape(-1, 6))
centerness_flatten.append(centerness[l].reshape(-1))
box_cls_flatten = torch.cat(box_cls_flatten, dim=0)
box_regression_flatten = torch.cat(box_regression_flatten, dim=0)
centerness_flatten = torch.cat(centerness_flatten, dim=0)
labels_flatten = torch.cat(labels_flatten, dim=0)
reg_targets_flatten = torch.cat(reg_targets_flatten, dim=0)
pos_inds = torch.nonzero(labels_flatten > 0).squeeze(1)
valid_inds = torch.nonzero(labels_flatten > -1).squeeze(1)
ignore_inds = torch.nonzero(labels_flatten == -1).squeeze(1)
argets = reg_targets_flatten[:, -1]
weight = torch.where(all_centerness_targets==0, torch.ones_like(all_centerness_targets), all_centerness_targets).unsqueeze(-1)
cls_weight[ignore_inds] = 0.05
labels_flatten.int(),
weight = cls_weight ) / (pos_inds.numel() + N)
box_regression_pos = box_regression_flatten[pos_inds]
reg_targets_pos = reg_targets_flatten[pos_inds]
if pos_inds.numel() > 0:
centerness_targets_pos = reg_targets_pos[:, -1]
reg_loss = smooth_l1_loss(
box_regression_pos,
reg_targets_pos[:, :-1],
weight = centerness_targets_pos.unsqueeze(-1) )
centerness_loss = self.centerness_loss_func(
centerness_flatten,
reg_targets_flatten[:,-1]
)
else:
reg_loss = box_regression_flatten.sum()
centerness_loss = centerness_flatten.sum()
return cls_loss, reg_loss, centerness_loss
def make_fcos_loss_evaluator(cfg):
loss_evaluator = FCOSLossComputation(cfg)
return loss_evaluator
| true | true |
1c385522dc3ad879da84161cb05818f4cd356848 | 8,193 | py | Python | powerline_shell/colortrans.py | vivekkrish/powerline-shell | ad27ed4ccc28f314d7985776b8e8663a33a67102 | [
"MIT"
] | 2,656 | 2017-11-28T05:11:53.000Z | 2022-03-31T14:10:18.000Z | powerline_shell/colortrans.py | vivekkrish/powerline-shell | ad27ed4ccc28f314d7985776b8e8663a33a67102 | [
"MIT"
] | 193 | 2017-11-28T23:20:17.000Z | 2022-03-25T12:57:55.000Z | powerline_shell/colortrans.py | vivekkrish/powerline-shell | ad27ed4ccc28f314d7985776b8e8663a33a67102 | [
"MIT"
] | 387 | 2017-11-29T22:33:39.000Z | 2022-03-31T18:24:35.000Z | #! /usr/bin/env python
"""
Code is modified (fairly heavily) by hryanjones@gmail.com from
https://gist.github.com/MicahElliott/719710
Convert values between RGB tuples and xterm-256 color codes.
Nice long listing of all 256 colors and their codes. Useful for
developing console color themes, or even script output schemes.
Resources:
* http://en.wikipedia.org/wiki/8-bit_color
* http://en.wikipedia.org/wiki/ANSI_escape_code
* /usr/share/X11/rgb.txt
I'm not sure where this script was inspired from. I think I must have
written it from scratch, though it's been several years now.
"""
__author__ = 'Micah Elliott http://MicahElliott.com'
__version__ = '0.1'
__copyright__ = 'Copyright (C) 2011 Micah Elliott. All rights reserved.'
__license__ = 'WTFPL http://sam.zoy.org/wtfpl/'
#---------------------------------------------------------------------
def hexstr2num(hexstr):
    """Parse a hexadecimal string such as '5f' into its integer value."""
    base = 16
    return int(hexstr, base)
def rgbstring2tuple(s):
    """Split a 6-character hex color like '1234af' into an (r, g, b) tuple."""
    red = int(s[:2], 16)
    green = int(s[2:4], 16)
    blue = int(s[4:], 16)
    return (red, green, blue)
# Lookup table from an (r, g, b) triple -- where each channel is one of the
# values representable in the xterm-256 palette -- to the xterm color code.
# Covers the 6x6x6 color cube, the 24-step grayscale ramp, and the 16 basic
# colors (some cube/basic duplicates map to the basic code, e.g. (0,0,255)->12).
RGB2SHORT_DICT = {
    (0, 0, 0): 16,
    (0, 0, 95): 17,
    (0, 0, 128): 4,
    (0, 0, 135): 18,
    (0, 0, 175): 19,
    (0, 0, 215): 20,
    (0, 0, 255): 12,
    (0, 95, 0): 22,
    (0, 95, 95): 23,
    (0, 95, 135): 24,
    (0, 95, 175): 25,
    (0, 95, 215): 26,
    (0, 95, 255): 27,
    (0, 128, 0): 2,
    (0, 128, 128): 6,
    (0, 135, 0): 28,
    (0, 135, 95): 29,
    (0, 135, 135): 30,
    (0, 135, 175): 31,
    (0, 135, 215): 32,
    (0, 135, 255): 33,
    (0, 175, 0): 34,
    (0, 175, 95): 35,
    (0, 175, 135): 36,
    (0, 175, 175): 37,
    (0, 175, 215): 38,
    (0, 175, 255): 39,
    (0, 215, 0): 40,
    (0, 215, 95): 41,
    (0, 215, 135): 42,
    (0, 215, 175): 43,
    (0, 215, 215): 44,
    (0, 215, 255): 45,
    (0, 255, 0): 46,
    (0, 255, 95): 47,
    (0, 255, 135): 48,
    (0, 255, 175): 49,
    (0, 255, 215): 50,
    (0, 255, 255): 14,
    (8, 8, 8): 232,
    (18, 18, 18): 233,
    (28, 28, 28): 234,
    (38, 38, 38): 235,
    (48, 48, 48): 236,
    (58, 58, 58): 237,
    (68, 68, 68): 238,
    (78, 78, 78): 239,
    (88, 88, 88): 240,
    (95, 0, 0): 52,
    (95, 0, 95): 53,
    (95, 0, 135): 54,
    (95, 0, 175): 55,
    (95, 0, 215): 56,
    (95, 0, 255): 57,
    (95, 95, 0): 58,
    (95, 95, 95): 59,
    (95, 95, 135): 60,
    (95, 95, 175): 61,
    (95, 95, 215): 62,
    (95, 95, 255): 63,
    (95, 135, 0): 64,
    (95, 135, 95): 65,
    (95, 135, 135): 66,
    (95, 135, 175): 67,
    (95, 135, 215): 68,
    (95, 135, 255): 69,
    (95, 175, 0): 70,
    (95, 175, 95): 71,
    (95, 175, 135): 72,
    (95, 175, 175): 73,
    (95, 175, 215): 74,
    (95, 175, 255): 75,
    (95, 215, 0): 76,
    (95, 215, 95): 77,
    (95, 215, 135): 78,
    (95, 215, 175): 79,
    (95, 215, 215): 80,
    (95, 215, 255): 81,
    (95, 255, 0): 82,
    (95, 255, 95): 83,
    (95, 255, 135): 84,
    (95, 255, 175): 85,
    (95, 255, 215): 86,
    (95, 255, 255): 87,
    (98, 98, 98): 241,
    (108, 108, 108): 242,
    (118, 118, 118): 243,
    (128, 0, 0): 1,
    (128, 0, 128): 5,
    (128, 128, 0): 3,
    (128, 128, 128): 244,
    (135, 0, 0): 88,
    (135, 0, 95): 89,
    (135, 0, 135): 90,
    (135, 0, 175): 91,
    (135, 0, 215): 92,
    (135, 0, 255): 93,
    (135, 95, 0): 94,
    (135, 95, 95): 95,
    (135, 95, 135): 96,
    (135, 95, 175): 97,
    (135, 95, 215): 98,
    (135, 95, 255): 99,
    (135, 135, 0): 100,
    (135, 135, 95): 101,
    (135, 135, 135): 102,
    (135, 135, 175): 103,
    (135, 135, 215): 104,
    (135, 135, 255): 105,
    (135, 175, 0): 106,
    (135, 175, 95): 107,
    (135, 175, 135): 108,
    (135, 175, 175): 109,
    (135, 175, 215): 110,
    (135, 175, 255): 111,
    (135, 215, 0): 112,
    (135, 215, 95): 113,
    (135, 215, 135): 114,
    (135, 215, 175): 115,
    (135, 215, 215): 116,
    (135, 215, 255): 117,
    (135, 255, 0): 118,
    (135, 255, 95): 119,
    (135, 255, 135): 120,
    (135, 255, 175): 121,
    (135, 255, 215): 122,
    (135, 255, 255): 123,
    (138, 138, 138): 245,
    (148, 148, 148): 246,
    (158, 158, 158): 247,
    (168, 168, 168): 248,
    (175, 0, 0): 124,
    (175, 0, 95): 125,
    (175, 0, 135): 126,
    (175, 0, 175): 127,
    (175, 0, 215): 128,
    (175, 0, 255): 129,
    (175, 95, 0): 130,
    (175, 95, 95): 131,
    (175, 95, 135): 132,
    (175, 95, 175): 133,
    (175, 95, 215): 134,
    (175, 95, 255): 135,
    (175, 135, 0): 136,
    (175, 135, 95): 137,
    (175, 135, 135): 138,
    (175, 135, 175): 139,
    (175, 135, 215): 140,
    (175, 135, 255): 141,
    (175, 175, 0): 142,
    (175, 175, 95): 143,
    (175, 175, 135): 144,
    (175, 175, 175): 145,
    (175, 175, 215): 146,
    (175, 175, 255): 147,
    (175, 215, 0): 148,
    (175, 215, 95): 149,
    (175, 215, 135): 150,
    (175, 215, 175): 151,
    (175, 215, 215): 152,
    (175, 215, 255): 153,
    (175, 255, 0): 154,
    (175, 255, 95): 155,
    (175, 255, 135): 156,
    (175, 255, 175): 157,
    (175, 255, 215): 158,
    (175, 255, 255): 159,
    (178, 178, 178): 249,
    (188, 188, 188): 250,
    (192, 192, 192): 7,
    (198, 198, 198): 251,
    (208, 208, 208): 252,
    (215, 0, 0): 160,
    (215, 0, 95): 161,
    (215, 0, 135): 162,
    (215, 0, 175): 163,
    (215, 0, 215): 164,
    (215, 0, 255): 165,
    (215, 95, 0): 166,
    (215, 95, 95): 167,
    (215, 95, 135): 168,
    (215, 95, 175): 169,
    (215, 95, 215): 170,
    (215, 95, 255): 171,
    (215, 135, 0): 172,
    (215, 135, 95): 173,
    (215, 135, 135): 174,
    (215, 135, 175): 175,
    (215, 135, 215): 176,
    (215, 135, 255): 177,
    (215, 175, 0): 178,
    (215, 175, 95): 179,
    (215, 175, 135): 180,
    (215, 175, 175): 181,
    (215, 175, 215): 182,
    (215, 175, 255): 183,
    (215, 215, 0): 184,
    (215, 215, 95): 185,
    (215, 215, 135): 186,
    (215, 215, 175): 187,
    (215, 215, 215): 188,
    (215, 215, 255): 189,
    (215, 255, 0): 190,
    (215, 255, 95): 191,
    (215, 255, 135): 192,
    (215, 255, 175): 193,
    (215, 255, 215): 194,
    (215, 255, 255): 195,
    (218, 218, 218): 253,
    (228, 228, 228): 254,
    (238, 238, 238): 255,
    (255, 0, 0): 196,
    (255, 0, 95): 197,
    (255, 0, 135): 198,
    (255, 0, 175): 199,
    (255, 0, 215): 200,
    (255, 0, 255): 13,
    (255, 95, 0): 202,
    (255, 95, 95): 203,
    (255, 95, 135): 204,
    (255, 95, 175): 205,
    (255, 95, 215): 206,
    (255, 95, 255): 207,
    (255, 135, 0): 208,
    (255, 135, 95): 209,
    (255, 135, 135): 210,
    (255, 135, 175): 211,
    (255, 135, 215): 212,
    (255, 135, 255): 213,
    (255, 175, 0): 214,
    (255, 175, 95): 215,
    (255, 175, 135): 216,
    (255, 175, 175): 217,
    (255, 175, 215): 218,
    (255, 175, 255): 219,
    (255, 215, 0): 220,
    (255, 215, 95): 221,
    (255, 215, 135): 222,
    (255, 215, 175): 223,
    (255, 215, 215): 224,
    (255, 215, 255): 225,
    (255, 255, 0): 11,
    (255, 255, 95): 227,
    (255, 255, 135): 228,
    (255, 255, 175): 229,
    (255, 255, 215): 230,
    (255, 255, 255): 231}
def rgb2short(r, g, b):
    """ Find the closest xterm-256 approximation to the given RGB value.
    @param r,g,b: each is a number between 0-255 for the Red, Green, and Blue values
    @returns: integer between 0 and 255, compatible with xterm.
    >>> rgb2short(18, 52, 86)
    23
    >>> rgb2short(255, 255, 255)
    231
    >>> rgb2short(13, 173, 214) # vimeo logo
    38
    """
    # Channel values available in the xterm-256 color cube.
    incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
    # Snap each channel to the nearest cube value; on an exact tie the
    # larger value wins (the ``-inc`` secondary key), matching the original
    # interval scan.  The original loop also shadowed the parameter ``b``
    # with a local of the same name; fixed here.
    res = tuple(
        min(incs, key=lambda inc: (abs(inc - part), -inc))
        for part in (r, g, b)
    )
    return RGB2SHORT_DICT[res]
#---------------------------------------------------------------------
if __name__ == '__main__':
    # Run the doctests embedded in rgb2short when executed directly.
    import doctest
    doctest.testmod()
| 25.683386 | 84 | 0.45368 |
__author__ = 'Micah Elliott http://MicahElliott.com'
__version__ = '0.1'
__copyright__ = 'Copyright (C) 2011 Micah Elliott. All rights reserved.'
__license__ = 'WTFPL http://sam.zoy.org/wtfpl/'
def hexstr2num(hexstr):
return int(hexstr, 16)
def rgbstring2tuple(s):
return tuple([hexstr2num(h) for h in (s[:2], s[2:4], s[4:])])
RGB2SHORT_DICT = {
(0, 0, 0): 16,
(0, 0, 95): 17,
(0, 0, 128): 4,
(0, 0, 135): 18,
(0, 0, 175): 19,
(0, 0, 215): 20,
(0, 0, 255): 12,
(0, 95, 0): 22,
(0, 95, 95): 23,
(0, 95, 135): 24,
(0, 95, 175): 25,
(0, 95, 215): 26,
(0, 95, 255): 27,
(0, 128, 0): 2,
(0, 128, 128): 6,
(0, 135, 0): 28,
(0, 135, 95): 29,
(0, 135, 135): 30,
(0, 135, 175): 31,
(0, 135, 215): 32,
(0, 135, 255): 33,
(0, 175, 0): 34,
(0, 175, 95): 35,
(0, 175, 135): 36,
(0, 175, 175): 37,
(0, 175, 215): 38,
(0, 175, 255): 39,
(0, 215, 0): 40,
(0, 215, 95): 41,
(0, 215, 135): 42,
(0, 215, 175): 43,
(0, 215, 215): 44,
(0, 215, 255): 45,
(0, 255, 0): 46,
(0, 255, 95): 47,
(0, 255, 135): 48,
(0, 255, 175): 49,
(0, 255, 215): 50,
(0, 255, 255): 14,
(8, 8, 8): 232,
(18, 18, 18): 233,
(28, 28, 28): 234,
(38, 38, 38): 235,
(48, 48, 48): 236,
(58, 58, 58): 237,
(68, 68, 68): 238,
(78, 78, 78): 239,
(88, 88, 88): 240,
(95, 0, 0): 52,
(95, 0, 95): 53,
(95, 0, 135): 54,
(95, 0, 175): 55,
(95, 0, 215): 56,
(95, 0, 255): 57,
(95, 95, 0): 58,
(95, 95, 95): 59,
(95, 95, 135): 60,
(95, 95, 175): 61,
(95, 95, 215): 62,
(95, 95, 255): 63,
(95, 135, 0): 64,
(95, 135, 95): 65,
(95, 135, 135): 66,
(95, 135, 175): 67,
(95, 135, 215): 68,
(95, 135, 255): 69,
(95, 175, 0): 70,
(95, 175, 95) : 71,
(95, 175, 135): 72,
(95, 175, 175): 73,
(95, 175, 215): 74,
(95, 175, 255): 75,
(95, 215, 0): 76,
(95, 215, 95) : 77,
(95, 215, 135): 78,
(95, 215, 175): 79,
(95, 215, 215): 80,
(95, 215, 255): 81,
(95, 255, 0): 82,
(95, 255, 95) : 83,
(95, 255, 135): 84,
(95, 255, 175): 85,
(95, 255, 215): 86,
(95, 255, 255): 87,
(98, 98, 98): 241,
(108, 108, 108): 242,
(118, 118, 118): 243,
(128, 0, 0): 1,
(128, 0, 128): 5,
(128, 128, 0): 3,
(128, 128, 128): 244,
(135, 0, 0): 88,
(135, 0, 95): 89,
(135, 0, 135): 90,
(135, 0, 175): 91,
(135, 0, 215): 92,
(135, 0, 255): 93,
(135, 95, 0): 94,
(135, 95, 95): 95,
(135, 95, 135): 96,
(135, 95, 175): 97,
(135, 95, 215): 98,
(135, 95, 255): 99,
(135, 135, 0): 100,
(135, 135, 95): 101,
(135, 135, 135): 102,
(135, 135, 175): 103,
(135, 135, 215): 104,
(135, 135, 255): 105,
(135, 175, 0): 106,
(135, 175, 95): 107,
(135, 175, 135): 108,
(135, 175, 175): 109,
(135, 175, 215): 110,
(135, 175, 255): 111,
(135, 215, 0): 112,
(135, 215, 95): 113,
(135, 215, 135): 114,
(135, 215, 175): 115,
(135, 215, 215): 116,
(135, 215, 255): 117,
(135, 255, 0): 118,
(135, 255, 95): 119,
(135, 255, 135): 120,
(135, 255, 175): 121,
(135, 255, 215): 122,
(135, 255, 255): 123,
(138, 138, 138): 245,
(148, 148, 148): 246,
(158, 158, 158): 247,
(168, 168, 168): 248,
(175, 0, 0): 124,
(175, 0, 95): 125,
(175, 0, 135): 126,
(175, 0, 175): 127,
(175, 0, 215): 128,
(175, 0, 255): 129,
(175, 95, 0): 130,
(175, 95, 95): 131,
(175, 95, 135): 132,
(175, 95, 175): 133,
(175, 95, 215): 134,
(175, 95, 255): 135,
(175, 135, 0): 136,
(175, 135, 95): 137,
(175, 135, 135): 138,
(175, 135, 175): 139,
(175, 135, 215): 140,
(175, 135, 255): 141,
(175, 175, 0): 142,
(175, 175, 95): 143,
(175, 175, 135): 144,
(175, 175, 175): 145,
(175, 175, 215): 146,
(175, 175, 255): 147,
(175, 215, 0): 148,
(175, 215, 95): 149,
(175, 215, 135): 150,
(175, 215, 175): 151,
(175, 215, 215): 152,
(175, 215, 255): 153,
(175, 255, 0): 154,
(175, 255, 95): 155,
(175, 255, 135): 156,
(175, 255, 175): 157,
(175, 255, 215): 158,
(175, 255, 255): 159,
(178, 178, 178): 249,
(188, 188, 188): 250,
(192, 192, 192): 7,
(198, 198, 198): 251,
(208, 208, 208): 252,
(215, 0, 0): 160,
(215, 0, 95): 161,
(215, 0, 135): 162,
(215, 0, 175): 163,
(215, 0, 215): 164,
(215, 0, 255): 165,
(215, 95, 0): 166,
(215, 95, 95): 167,
(215, 95, 135): 168,
(215, 95, 175): 169,
(215, 95, 215): 170,
(215, 95, 255): 171,
(215, 135, 0): 172,
(215, 135, 95): 173,
(215, 135, 135): 174,
(215, 135, 175): 175,
(215, 135, 215): 176,
(215, 135, 255): 177,
(215, 175, 0): 178,
(215, 175, 95): 179,
(215, 175, 135): 180,
(215, 175, 175): 181,
(215, 175, 215): 182,
(215, 175, 255): 183,
(215, 215, 0): 184,
(215, 215, 95): 185,
(215, 215, 135): 186,
(215, 215, 175): 187,
(215, 215, 215): 188,
(215, 215, 255): 189,
(215, 255, 0): 190,
(215, 255, 95): 191,
(215, 255, 135): 192,
(215, 255, 175): 193,
(215, 255, 215): 194,
(215, 255, 255): 195,
(218, 218, 218): 253,
(228, 228, 228): 254,
(238, 238, 238): 255,
(255, 0, 0): 196,
(255, 0, 95): 197,
(255, 0, 135): 198,
(255, 0, 175): 199,
(255, 0, 215): 200,
(255, 0, 255): 13,
(255, 95, 0): 202,
(255, 95, 95): 203,
(255, 95, 135): 204,
(255, 95, 175): 205,
(255, 95, 215): 206,
(255, 95, 255): 207,
(255, 135, 0): 208,
(255, 135, 95): 209,
(255, 135, 135): 210,
(255, 135, 175): 211,
(255, 135, 215): 212,
(255, 135, 255): 213,
(255, 175, 0): 214,
(255, 175, 95): 215,
(255, 175, 135): 216,
(255, 175, 175): 217,
(255, 175, 215): 218,
(255, 175, 255): 219,
(255, 215, 0): 220,
(255, 215, 95): 221,
(255, 215, 135): 222,
(255, 215, 175): 223,
(255, 215, 215): 224,
(255, 215, 255): 225,
(255, 255, 0): 11,
(255, 255, 95): 227,
(255, 255, 135): 228,
(255, 255, 175): 229,
(255, 255, 215): 230,
(255, 255, 255): 231}
def rgb2short(r, g, b):
incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
parts = [ r, g, b]
res = []
for part in parts:
i = 0
while i < len(incs)-1:
s, b = incs[i], incs[i+1]
if s <= part <= b:
s1 = abs(s - part)
b1 = abs(b - part)
if s1 < b1: closest = s
else: closest = b
res.append(closest)
break
i += 1
return RGB2SHORT_DICT[tuple(res)]
if __name__ == '__main__':
import doctest
doctest.testmod()
| true | true |
1c38553cfee6e8a970919d085bb6fbe062063cc1 | 773 | py | Python | src/qgis_ros/core/translators/json_transport.py | acfrmarine/qgis_ros | 01dd107f963b87df063bf2f11b0a484f4323cae9 | [
"MIT"
] | 31 | 2018-09-11T17:50:17.000Z | 2021-09-13T11:48:47.000Z | src/qgis_ros/core/translators/json_transport.py | acfrmarine/qgis_ros | 01dd107f963b87df063bf2f11b0a484f4323cae9 | [
"MIT"
] | 9 | 2018-09-11T00:45:23.000Z | 2022-01-07T13:19:20.000Z | src/qgis_ros/core/translators/json_transport.py | acfrmarine/qgis_ros | 01dd107f963b87df063bf2f11b0a484f4323cae9 | [
"MIT"
] | 9 | 2018-10-12T12:31:38.000Z | 2021-06-05T02:52:44.000Z | import json_transport
from .translator import Translator, VectorTranslatorMixin
class JSONTransportTranslator(Translator, VectorTranslatorMixin):
messageType = json_transport.PackedJson
# geomType = Translator.GeomTypes.Unknown # Need to detect this from the first message.
geomType = Translator.GeomTypes.Polygon # TODO: revert this.
@staticmethod
def translate(msg):
# Attempt to detect GeoJSON in a JSON message.
msg = msg.data
if isinstance(msg, list):
geojson_msg = msg
elif isinstance(msg, dict) and msg.get('type') == 'FeatureCollection':
geojson_msg = msg.get('features')
else:
raise ValueError('JSON message is not valid GeoJSON.')
return geojson_msg
| 30.92 | 92 | 0.683053 | import json_transport
from .translator import Translator, VectorTranslatorMixin
class JSONTransportTranslator(Translator, VectorTranslatorMixin):
messageType = json_transport.PackedJson
@staticmethod
def translate(msg):
msg = msg.data
if isinstance(msg, list):
geojson_msg = msg
elif isinstance(msg, dict) and msg.get('type') == 'FeatureCollection':
geojson_msg = msg.get('features')
else:
raise ValueError('JSON message is not valid GeoJSON.')
return geojson_msg
| true | true |
1c3855af57ee7989c4d5d05a2f3435cc8c8f1ddf | 16,700 | py | Python | unicorn_binance_websocket_api/restclient.py | HrTran/unicorn-binance-websocket-api | 64da01868a8c251a486993b868f0de5083922d64 | [
"MIT"
] | null | null | null | unicorn_binance_websocket_api/restclient.py | HrTran/unicorn-binance-websocket-api | 64da01868a8c251a486993b868f0de5083922d64 | [
"MIT"
] | null | null | null | unicorn_binance_websocket_api/restclient.py | HrTran/unicorn-binance-websocket-api | 64da01868a8c251a486993b868f0de5083922d64 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# File: unicorn_binance_websocket_api/restclient.py
#
# Part of ‘UNICORN Binance WebSocket API’
# Project website: https://github.com/LUCIT-Systems-and-Development/unicorn-binance-websocket-api
# Documentation: https://lucit-systems-and-development.github.io/unicorn-binance-websocket-api
# PyPI: https://pypi.org/project/unicorn-binance-websocket-api/
#
# Author: LUCIT Systems and Development
#
# Copyright (c) 2019-2022, LUCIT Systems and Development (https://www.lucit.tech) and Oliver Zehentleitner
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import json
import logging
import requests
import socket
import threading
import time
logger = logging.getLogger(__name__)
class BinanceWebSocketApiRestclient(object):
def __init__(self, manager):
"""
Create a restclient instance!
:param manager: provide `self` of `BinanceWebsocketApiManager()`
:type manager: object
"""
self.manager = manager
self.api_key = False
self.api_secret = False
self.symbol = False
self.listen_key = False
self.last_static_ping_listen_key = False
self.listen_key_output = False
self.threading_lock = threading.Lock()
if self.manager.exchange == "binance.com":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "api/v3/userDataStream"
elif self.manager.exchange == "binance.com-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "api/v3/userDataStream"
elif self.manager.exchange == "binance.com-margin":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "sapi/v1/userDataStream"
elif self.manager.exchange == "binance.com-margin-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "sapi/v1/userDataStream"
elif self.manager.exchange == "binance.com-isolated_margin":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "sapi/v1/userDataStream/isolated"
elif self.manager.exchange == "binance.com-isolated_margin-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "sapi/v1/userDataStream/isolated"
elif self.manager.exchange == "binance.com-futures":
self.restful_base_uri = "https://fapi.binance.com/"
self.path_userdata = "fapi/v1/listenKey"
elif self.manager.exchange == "binance.com-futures-testnet":
self.restful_base_uri = "https://testnet.binancefuture.com/"
self.path_userdata = "fapi/v1/listenKey"
elif self.manager.exchange == "binance.com-coin-futures" or self.manager.exchange == "binance.com-coin_futures":
self.restful_base_uri = "https://dapi.binance.com/"
self.path_userdata = "dapi/v1/listenKey"
elif self.manager.exchange == "binance.je":
self.restful_base_uri = "https://api.binance.je/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "binance.us":
self.restful_base_uri = "https://api.binance.us/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "trbinance.com":
self.restful_base_uri = "https://api.binance.cc/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "jex.com":
self.restful_base_uri = "https://www.jex.com/"
self.path_userdata = "api/v1/userDataStream"
def _do_request(self, action=False):
"""
Do a request!
:param action: choose "delete" or "keepalive"
:type action: str
:return: the response
:rtype: str or False
"""
if action == "keepalive":
logger.info(f"BinanceWebSocketApiRestclient.keepalive_listen_key({str(self.listen_key_output)})")
method = "put"
try:
response = self._request(method, self.path_userdata, False, {'listenKey': str(self.listen_key)})
self.last_static_ping_listen_key = time.time()
return response
except KeyError:
return False
except TypeError:
return False
elif action == "delete":
logger.info(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)})")
method = "delete"
try:
response = self._request(method, self.path_userdata, False, {'listenKey': str(self.listen_key)})
self.listen_key = False
return response
except KeyError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)}) - "
f"KeyError - error_msg: {str(error_msg)}")
return False
except TypeError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)}) - "
f"KeyError - error_msg: {str(error_msg)}")
return False
else:
return False
def _init_vars(self,
stream_id,
api_key=False,
api_secret=False,
symbol=False,
listen_key=False,
last_static_ping_listen_key=False):
"""
set default values and load values from stream_list
:param stream_id: provide a stream_id (only needed for userData Streams (acquiring a listenKey)
:type stream_id: str
:param api_key: provide a valid Binance API key
:type api_key: str
:param api_secret: provide a valid Binance API secret
:type api_secret: str
:param symbol: provide the symbol for isolated_margin user_data listen_key
:type symbol: str
:param listen_key: provide the listen_key
:type listen_key: str
:param listen_key: the `last_static_ping_listen_key` variable of the `listen_key` you want to keepalive
:type listen_key: int
:return: bool
"""
self.api_key = api_key
self.api_secret = api_secret
self.listen_key = listen_key
self.symbol = symbol
self.last_static_ping_listen_key = last_static_ping_listen_key
self.listen_key_output = self.manager.replacement_text
try:
if self.api_key is False:
self.api_key = self.manager.stream_list[stream_id]['api_key']
if self.api_secret is False:
self.api_secret = self.manager.stream_list[stream_id]['api_secret']
if self.symbol is False:
self.symbol = self.manager.stream_list[stream_id]['symbols']
if self.listen_key is False:
self.listen_key = self.manager.stream_list[stream_id]['listen_key']
if self.last_static_ping_listen_key is False:
self.last_static_ping_listen_key = self.manager.stream_list[stream_id]['last_static_ping_listen_key']
if self.manager.show_secrets_in_logs is True:
self.listen_key_output = self.listen_key
except KeyError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.init_vars() - TypeError - error_msg: {str(error_msg)}")
return False
return True
def _request(self, method, path, query=False, data=False):
"""
Do the request
:param method: choose the method to use (post, put or delete)
:type method: str
:param path: choose the path to use
:type path: str
:param query: choose the query to use
:type query: str
:param data: the payload for the post method
:type data: str
:return: the response
:rtype: str or False
"""
requests_headers = {'Accept': 'application/json',
'User-Agent': str(self.manager.get_user_agent()),
'X-MBX-APIKEY': str(self.api_key)}
if query is not False:
uri = self.restful_base_uri + path + "?" + query
else:
uri = self.restful_base_uri + path
try:
if method == "post":
if data is False:
request_handler = requests.post(uri, headers=requests_headers)
else:
request_handler = requests.post(uri, headers=requests_headers, data=data)
elif method == "put":
request_handler = requests.put(uri, headers=requests_headers, data=data)
elif method == "delete":
request_handler = requests.delete(uri, headers=requests_headers)
else:
request_handler = False
except requests.exceptions.ConnectionError as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient._request() - error: 9 - error_msg: {str(error_msg)}")
return False
except socket.gaierror as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient._request() - error: 10 - error_msg: {str(error_msg)}")
return False
if request_handler.status_code == "418":
logger.critical("BinanceWebSocketApiRestclient._request() - error_msg: received status_code 418 from binance! You got"
"banned from the binance api! Read this: https://github.com/binance-exchange/binance-"
"official-api-sphinx/blob/master/rest-api.md#limits")
elif request_handler.status_code == "429":
logger.critical("BinanceWebSocketApiRestclient._request() - error_msg: received status_code 429 from "
"binance! Back off or you are going to get banned! Read this: "
"https://github.com/binance-exchange/binance-official-api-sphinx/blob/master/"
"rest-api.md#limits")
try:
respond = request_handler.json()
except json.decoder.JSONDecodeError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient._request() - error_msg: {str(error_msg)}")
return False
self.manager.binance_api_status['weight'] = request_handler.headers.get('X-MBX-USED-WEIGHT')
self.manager.binance_api_status['timestamp'] = time.time()
self.manager.binance_api_status['status_code'] = request_handler.status_code
request_handler.close()
return respond
def get_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
last_static_ping_listen_key=False,
symbol=False):
"""
Request a valid listen_key from binance
:param stream_id: provide a stream_id
:type stream_id: str
:param api_key: provide a valid Binance API key
:type api_key: str
:param api_secret: provide a valid Binance API secret
:type api_secret: str
:param last_static_ping_listen_key: the `last_static_ping_listen_key` variable of the `listen_key` you want to keepalive
:type last_static_ping_listen_key: int
:param symbol: provide the symbol for isolated_margin user_data listen_key
:type symbol: str
:return: listen_key
:rtype: str or False
"""
logger.info(f"BinanceWebSocketApiRestclient.get_listen_key() symbol='{str(self.symbol)}' "
f"stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id,
api_key=api_key,
api_secret=api_secret,
symbol=symbol,
last_static_ping_listen_key=last_static_ping_listen_key)
method = "post"
if self.manager.exchange == "binance.com-isolated_margin" or \
self.manager.exchange == "binance.com-isolated_margin-testnet":
if self.symbol is False:
logger.critical("BinanceWebSocketApiRestclient.get_listen_key() - error_msg: Parameter "
"`symbol` is missing!")
return False
else:
response = self._request(method, self.path_userdata, False, {'symbol': str(self.symbol)})
else:
try:
response = self._request(method, self.path_userdata)
except AttributeError as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient.get_listen_key() - error: 8 - "
f"error_msg: {error_msg} - Can not acquire listen_key!")
return False
try:
self.listen_key = response['listenKey']
self.last_static_ping_listen_key = time.time()
return response
except KeyError:
return response
except TypeError:
return False
def delete_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
listen_key=False):
"""
Delete a specific listen key
:param stream_id: provide a stream_id
:type stream_id: str
:param api_key: provide a valid Binance API key
:type api_key: str
:param api_secret: provide a valid Binance API secret
:type api_secret: str
:param listen_key: the listenkey you want to delete
:type listen_key: str or bool
:return: the response
:rtype: str or False
"""
logger.info(f"BinanceWebSocketApiRestclient.delete_listen_key() stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id, api_key, api_secret, listen_key)
return self._do_request("delete")
def keepalive_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
listen_key=False,
last_static_ping_listen_key=False):
"""
Ping a listenkey to keep it alive
:param stream_id: provide a stream_id
:type stream_id: str
:param api_key: provide a valid Binance API key
:type api_key: str
:param api_secret: provide a valid Binance API secret
:type api_secret: str
:param listen_key: the listenkey you want to keepalive
:type listen_key: str
:param last_static_ping_listen_key: the `last_static_ping_listen_key` variable of the `listen_key` you want to keepalive
:type last_static_ping_listen_key: int
:return: the response
:rtype: str or False
"""
logger.info(f"BinanceWebSocketApiRestclient.get_listen_key() stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id, api_key, api_secret, listen_key, last_static_ping_listen_key)
return self._do_request("keepalive")
| 46.648045 | 130 | 0.614731 |
import json
import logging
import requests
import socket
import threading
import time
logger = logging.getLogger(__name__)
class BinanceWebSocketApiRestclient(object):
def __init__(self, manager):
self.manager = manager
self.api_key = False
self.api_secret = False
self.symbol = False
self.listen_key = False
self.last_static_ping_listen_key = False
self.listen_key_output = False
self.threading_lock = threading.Lock()
if self.manager.exchange == "binance.com":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "api/v3/userDataStream"
elif self.manager.exchange == "binance.com-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "api/v3/userDataStream"
elif self.manager.exchange == "binance.com-margin":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "sapi/v1/userDataStream"
elif self.manager.exchange == "binance.com-margin-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "sapi/v1/userDataStream"
elif self.manager.exchange == "binance.com-isolated_margin":
self.restful_base_uri = "https://api.binance.com/"
self.path_userdata = "sapi/v1/userDataStream/isolated"
elif self.manager.exchange == "binance.com-isolated_margin-testnet":
self.restful_base_uri = "https://testnet.binance.vision/"
self.path_userdata = "sapi/v1/userDataStream/isolated"
elif self.manager.exchange == "binance.com-futures":
self.restful_base_uri = "https://fapi.binance.com/"
self.path_userdata = "fapi/v1/listenKey"
elif self.manager.exchange == "binance.com-futures-testnet":
self.restful_base_uri = "https://testnet.binancefuture.com/"
self.path_userdata = "fapi/v1/listenKey"
elif self.manager.exchange == "binance.com-coin-futures" or self.manager.exchange == "binance.com-coin_futures":
self.restful_base_uri = "https://dapi.binance.com/"
self.path_userdata = "dapi/v1/listenKey"
elif self.manager.exchange == "binance.je":
self.restful_base_uri = "https://api.binance.je/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "binance.us":
self.restful_base_uri = "https://api.binance.us/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "trbinance.com":
self.restful_base_uri = "https://api.binance.cc/"
self.path_userdata = "api/v1/userDataStream"
elif self.manager.exchange == "jex.com":
self.restful_base_uri = "https://www.jex.com/"
self.path_userdata = "api/v1/userDataStream"
def _do_request(self, action=False):
if action == "keepalive":
logger.info(f"BinanceWebSocketApiRestclient.keepalive_listen_key({str(self.listen_key_output)})")
method = "put"
try:
response = self._request(method, self.path_userdata, False, {'listenKey': str(self.listen_key)})
self.last_static_ping_listen_key = time.time()
return response
except KeyError:
return False
except TypeError:
return False
elif action == "delete":
logger.info(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)})")
method = "delete"
try:
response = self._request(method, self.path_userdata, False, {'listenKey': str(self.listen_key)})
self.listen_key = False
return response
except KeyError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)}) - "
f"KeyError - error_msg: {str(error_msg)}")
return False
except TypeError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.delete_listen_key({str(self.listen_key_output)}) - "
f"KeyError - error_msg: {str(error_msg)}")
return False
else:
return False
def _init_vars(self,
stream_id,
api_key=False,
api_secret=False,
symbol=False,
listen_key=False,
last_static_ping_listen_key=False):
self.api_key = api_key
self.api_secret = api_secret
self.listen_key = listen_key
self.symbol = symbol
self.last_static_ping_listen_key = last_static_ping_listen_key
self.listen_key_output = self.manager.replacement_text
try:
if self.api_key is False:
self.api_key = self.manager.stream_list[stream_id]['api_key']
if self.api_secret is False:
self.api_secret = self.manager.stream_list[stream_id]['api_secret']
if self.symbol is False:
self.symbol = self.manager.stream_list[stream_id]['symbols']
if self.listen_key is False:
self.listen_key = self.manager.stream_list[stream_id]['listen_key']
if self.last_static_ping_listen_key is False:
self.last_static_ping_listen_key = self.manager.stream_list[stream_id]['last_static_ping_listen_key']
if self.manager.show_secrets_in_logs is True:
self.listen_key_output = self.listen_key
except KeyError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient.init_vars() - TypeError - error_msg: {str(error_msg)}")
return False
return True
def _request(self, method, path, query=False, data=False):
requests_headers = {'Accept': 'application/json',
'User-Agent': str(self.manager.get_user_agent()),
'X-MBX-APIKEY': str(self.api_key)}
if query is not False:
uri = self.restful_base_uri + path + "?" + query
else:
uri = self.restful_base_uri + path
try:
if method == "post":
if data is False:
request_handler = requests.post(uri, headers=requests_headers)
else:
request_handler = requests.post(uri, headers=requests_headers, data=data)
elif method == "put":
request_handler = requests.put(uri, headers=requests_headers, data=data)
elif method == "delete":
request_handler = requests.delete(uri, headers=requests_headers)
else:
request_handler = False
except requests.exceptions.ConnectionError as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient._request() - error: 9 - error_msg: {str(error_msg)}")
return False
except socket.gaierror as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient._request() - error: 10 - error_msg: {str(error_msg)}")
return False
if request_handler.status_code == "418":
logger.critical("BinanceWebSocketApiRestclient._request() - error_msg: received status_code 418 from binance! You got"
"banned from the binance api! Read this: https://github.com/binance-exchange/binance-"
"official-api-sphinx/blob/master/rest-api.md#limits")
elif request_handler.status_code == "429":
logger.critical("BinanceWebSocketApiRestclient._request() - error_msg: received status_code 429 from "
"binance! Back off or you are going to get banned! Read this: "
"https://github.com/binance-exchange/binance-official-api-sphinx/blob/master/"
"rest-api.md#limits")
try:
respond = request_handler.json()
except json.decoder.JSONDecodeError as error_msg:
logger.error(f"BinanceWebSocketApiRestclient._request() - error_msg: {str(error_msg)}")
return False
self.manager.binance_api_status['weight'] = request_handler.headers.get('X-MBX-USED-WEIGHT')
self.manager.binance_api_status['timestamp'] = time.time()
self.manager.binance_api_status['status_code'] = request_handler.status_code
request_handler.close()
return respond
def get_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
last_static_ping_listen_key=False,
symbol=False):
logger.info(f"BinanceWebSocketApiRestclient.get_listen_key() symbol='{str(self.symbol)}' "
f"stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id,
api_key=api_key,
api_secret=api_secret,
symbol=symbol,
last_static_ping_listen_key=last_static_ping_listen_key)
method = "post"
if self.manager.exchange == "binance.com-isolated_margin" or \
self.manager.exchange == "binance.com-isolated_margin-testnet":
if self.symbol is False:
logger.critical("BinanceWebSocketApiRestclient.get_listen_key() - error_msg: Parameter "
"`symbol` is missing!")
return False
else:
response = self._request(method, self.path_userdata, False, {'symbol': str(self.symbol)})
else:
try:
response = self._request(method, self.path_userdata)
except AttributeError as error_msg:
logger.critical(f"BinanceWebSocketApiRestclient.get_listen_key() - error: 8 - "
f"error_msg: {error_msg} - Can not acquire listen_key!")
return False
try:
self.listen_key = response['listenKey']
self.last_static_ping_listen_key = time.time()
return response
except KeyError:
return response
except TypeError:
return False
def delete_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
listen_key=False):
logger.info(f"BinanceWebSocketApiRestclient.delete_listen_key() stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id, api_key, api_secret, listen_key)
return self._do_request("delete")
def keepalive_listen_key(self,
stream_id=False,
api_key=False,
api_secret=False,
listen_key=False,
last_static_ping_listen_key=False):
logger.info(f"BinanceWebSocketApiRestclient.get_listen_key() stream_id='{str(stream_id)}')")
if stream_id is False:
return False
with self.threading_lock:
self._init_vars(stream_id, api_key, api_secret, listen_key, last_static_ping_listen_key)
return self._do_request("keepalive")
| true | true |
1c38572ef8d952a91c39c221f8b1ab75118fae3b | 536 | py | Python | helloWorldApp/app/__init__.py | OscarPalominoC/cursoFlask | 8f87b8165a45c1c0124184e9866a505def463e44 | [
"MIT"
] | null | null | null | helloWorldApp/app/__init__.py | OscarPalominoC/cursoFlask | 8f87b8165a45c1c0124184e9866a505def463e44 | [
"MIT"
] | null | null | null | helloWorldApp/app/__init__.py | OscarPalominoC/cursoFlask | 8f87b8165a45c1c0124184e9866a505def463e44 | [
"MIT"
] | null | null | null | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
from .config import Config
from .auth import auth
from .models import UserModel
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
@login_manager.user_loader
def load_user(username):
return UserModel.query(username)
def create_app():
app = Flask(__name__)
bootstrap = Bootstrap(app)
app.config.from_object(Config)
login_manager.init_app(app)
app.register_blueprint(auth)
return app | 20.615385 | 39 | 0.774254 | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
from .config import Config
from .auth import auth
from .models import UserModel
login_manager = LoginManager()
login_manager.login_view = 'auth.login'
@login_manager.user_loader
def load_user(username):
return UserModel.query(username)
def create_app():
app = Flask(__name__)
bootstrap = Bootstrap(app)
app.config.from_object(Config)
login_manager.init_app(app)
app.register_blueprint(auth)
return app | true | true |
1c385763e55768956dcf5c96c892d9d54ca34977 | 31,252 | py | Python | libqtile/backend/wayland/window.py | flexagoon/qtile | ecf79a91da058dc9f8dd032a42e13eac9c2a270d | [
"MIT"
] | null | null | null | libqtile/backend/wayland/window.py | flexagoon/qtile | ecf79a91da058dc9f8dd032a42e13eac9c2a270d | [
"MIT"
] | null | null | null | libqtile/backend/wayland/window.py | flexagoon/qtile | ecf79a91da058dc9f8dd032a42e13eac9c2a270d | [
"MIT"
] | null | null | null | # Copyright (c) 2021 Matt Colligan
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import annotations
import functools
import typing
import cairocffi
import pywayland
from wlroots import ffi
from wlroots.util.edges import Edges
from wlroots.wlr_types import Box, Texture
from wlroots.wlr_types.layer_shell_v1 import LayerSurfaceV1
from wlroots.wlr_types.xdg_shell import (
XdgPopup,
XdgSurface,
XdgTopLevelSetFullscreenEvent,
)
from libqtile import hook, utils
from libqtile.backend import base
from libqtile.backend.base import FloatStates
from libqtile.backend.wayland.drawer import Drawer
from libqtile.backend.wayland.wlrq import DRM_FORMAT_ARGB8888, HasListeners
from libqtile.command.base import CommandError
from libqtile.log_utils import logger
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Tuple, Union
from wlroots.wlr_types.surface import SubSurface as WlrSubSurface
from libqtile.backend.wayland.core import Core
from libqtile.backend.wayland.output import Output
from libqtile.core.manager import Qtile
from libqtile.group import _Group
from libqtile.utils import ColorType
EDGES_TILED = Edges.TOP | Edges.BOTTOM | Edges.LEFT | Edges.RIGHT
EDGES_FLOAT = Edges.NONE
@functools.lru_cache()
def _rgb(color: ColorType) -> ffi.CData:
    """Convert a qtile colour spec into a ``float[4]`` C array for border
    painting, caching results so repeated paints reuse the same array."""
    if isinstance(color, ffi.CData):
        # Already a C array: return it untouched.
        return color
    channels = utils.rgb(color)
    return ffi.new("float[4]", channels)
# Window manages XdgSurfaces, Static manages XdgSurfaces and LayerSurfaceV1s
SurfaceType = typing.Union[XdgSurface, LayerSurfaceV1]
class Window(base.Window, HasListeners):
    """A regular XDG top-level window.

    Wraps a wlroots ``XdgSurface`` and connects its signals to qtile's
    window lifecycle: map/unmap/destroy, popup and subsurface creation,
    title/app_id updates, and client fullscreen requests.
    """

    def __init__(self, core: Core, qtile: Qtile, surface: SurfaceType, wid: int):
        base.Window.__init__(self)
        self.core = core
        self.qtile = qtile
        self.surface = surface
        self._group: Optional[_Group] = None
        self.popups: List[XdgPopupWindow] = []
        self.subsurfaces: List[SubSurface] = []
        self._wid = wid
        self._mapped: bool = False
        self.x = 0
        self.y = 0
        self.bordercolor: List[ffi.CData] = [_rgb((0, 0, 0, 1))]
        self.opacity: float = 1.0
        self._outputs: List[Output] = []

        # These start as None and are set in the first place() call
        self._width: Optional[int] = None
        self._height: Optional[int] = None

        assert isinstance(surface, XdgSurface)
        if surface.toplevel.title:
            self.name = surface.toplevel.title
        self._app_id: Optional[str] = surface.toplevel.app_id
        # Tell the client it is tiled on all edges so it draws accordingly.
        surface.set_tiled(EDGES_TILED)
        self._float_state = FloatStates.NOT_FLOATING
        self.float_x: Optional[int] = None
        self.float_y: Optional[int] = None
        self._float_width: int = self.width
        self._float_height: int = self.height

        # Subscribe to the wlroots signals this window must react to.
        self.add_listener(surface.map_event, self._on_map)
        self.add_listener(surface.unmap_event, self._on_unmap)
        self.add_listener(surface.destroy_event, self._on_destroy)
        self.add_listener(surface.new_popup_event, self._on_new_popup)
        self.add_listener(surface.toplevel.request_fullscreen_event, self._on_request_fullscreen)
        self.add_listener(surface.toplevel.set_title_event, self._on_set_title)
        self.add_listener(surface.toplevel.set_app_id_event, self._on_set_app_id)
        self.add_listener(surface.surface.commit_event, self._on_commit)
        self.add_listener(surface.surface.new_subsurface_event, self._on_new_subsurface)

    def finalize(self):
        """Detach all listeners for this window and its subsurfaces."""
        self.finalize_listeners()
        for subsurface in self.subsurfaces:
            subsurface.finalize()

    @property
    def wid(self):
        # Unique window ID assigned by the core.
        return self._wid

    @property
    def width(self) -> int:
        # Until the first place() call, fall back to the surface's own size.
        if self._width is None:
            return self.surface.surface.current.width
        return self._width

    @width.setter
    def width(self, width: int) -> None:
        self._width = width

    @property
    def height(self) -> int:
        # Until the first place() call, fall back to the surface's own size.
        if self._height is None:
            return self.surface.surface.current.height
        return self._height

    @height.setter
    def height(self, height: int) -> None:
        self._height = height

    @property
    def group(self) -> Optional[_Group]:
        return self._group

    @group.setter
    def group(self, group: Optional[_Group]) -> None:
        self._group = group

    @property
    def mapped(self) -> bool:
        return self._mapped

    @mapped.setter
    def mapped(self, mapped: bool) -> None:
        """We keep track of which windows are mapped so we know which to render"""
        if mapped == self._mapped:
            return
        self._mapped = mapped
        if mapped:
            self.core.mapped_windows.append(self)
        else:
            self.core.mapped_windows.remove(self)
        self.core.stack_windows()

    def _on_map(self, _listener, _data):
        """Handle the surface becoming visible; manage it on first map."""
        logger.debug("Signal: window map")

        if self in self.core.pending_windows:
            # First map: hand the window over to qtile for management.
            self.core.pending_windows.remove(self)
            logger.debug(f"Managing new top-level window with window ID: {self.wid}")
            self.qtile.manage(self)

        if self.group.screen:
            self.mapped = True
            self.core.focus_window(self)

    def _on_unmap(self, _listener, _data):
        """Handle the surface being hidden; drop keyboard focus if we had it."""
        logger.debug("Signal: window unmap")
        self.mapped = False
        self.damage()
        seat = self.core.seat
        if not seat.destroyed:
            if self.surface.surface == seat.keyboard_state.focused_surface:
                seat.keyboard_clear_focus()

    def _on_destroy(self, _listener, _data):
        """Clean up when the client destroys the surface."""
        logger.debug("Signal: window destroy")
        if self.mapped:
            logger.warning("Window destroyed before unmap event.")
            self.mapped = False
        self.qtile.unmanage(self.wid)
        self.finalize()

    def _on_new_popup(self, _listener, xdg_popup: XdgPopup):
        """Track a new popup owned by this window."""
        logger.debug("Signal: window new_popup")
        self.popups.append(XdgPopupWindow(self, xdg_popup))

    def _on_request_fullscreen(self, _listener, event: XdgTopLevelSetFullscreenEvent):
        """Honour a client fullscreen request if auto_fullscreen is enabled."""
        logger.debug("Signal: window request_fullscreen")
        if self.qtile.config.auto_fullscreen:
            self.fullscreen = event.fullscreen

    def _on_set_title(self, _listener, _data):
        """Sync the window name with the client-provided title."""
        logger.debug("Signal: window set_title")
        self.name = self.surface.toplevel.title
        hook.fire('client_name_updated', self)

    def _on_set_app_id(self, _listener, _data):
        """Sync the cached app_id (used as the WM_CLASS equivalent)."""
        logger.debug("Signal: window set_app_id")
        self._app_id = self.surface.toplevel.app_id

    def _on_commit(self, _listener, _data):
        # The surface content changed; schedule a repaint.
        self.damage()

    def _on_new_subsurface(self, _listener, subsurface: WlrSubSurface):
        # Track subsurfaces so their commits also trigger repaints.
        self.subsurfaces.append(SubSurface(self, subsurface))

    def has_fixed_size(self) -> bool:
        """Return True if the client forbids resizing (min size == max size > 0)."""
        assert isinstance(self.surface, XdgSurface)
        state = self.surface.toplevel._ptr.current
        return (
            0 < state.min_width == state.max_width and
            0 < state.min_height == state.max_height
        )

    def is_transient_for(self) -> Optional[base.WindowType]:
        """What window is this window a transient window for?"""
        assert isinstance(self.surface, XdgSurface)
        parent = self.surface.toplevel.parent
        if parent:
            for win in self.qtile.windows_map.values():
                if not isinstance(win, Internal) and win.surface == parent:  # type: ignore
                    return win
        return None

    def _find_outputs(self):
        """Find the outputs on which this window can be seen."""
        self._outputs = [o for o in self.core.outputs if o.contains(self)]

    def damage(self) -> None:
        """Mark every output showing this window as needing a repaint."""
        for output in self._outputs:
            output.damage()

    def hide(self):
        # Emitting the unmap event reuses the normal unmap handling path.
        if self.mapped:
            self.surface.unmap_event.emit()

    def unhide(self):
        # Emitting the map event reuses the normal map handling path.
        if not self.mapped:
            self.surface.map_event.emit()

    def kill(self):
        """Politely ask the client to close this window."""
        self.surface.send_close()

    def get_pid(self) -> int:
        """Return the PID of the client that owns this surface."""
        pid = pywayland.ffi.new("pid_t *")
        pywayland.lib.wl_client_get_credentials(
            self.surface._ptr.client.client, pid, ffi.NULL, ffi.NULL
        )
        return pid[0]

    def get_wm_class(self) -> Optional[List]:
        # The xdg-shell app_id plays the role of X11's WM_CLASS.
        if self._app_id:
            return [self._app_id]
        return None

    def togroup(self, group_name=None, *, switch_group=False):
        """Move window to a specified group

        Also switch to that group if switch_group is True.
        """
        if group_name is None:
            group = self.qtile.current_group
        else:
            group = self.qtile.groups_map.get(group_name)
            if group is None:
                raise CommandError("No such group: %s" % group_name)

        if self.group is not group:
            self.hide()
            if self.group:
                if self.group.screen:
                    # for floats remove window offset
                    self.x -= self.group.screen.x
                self.group.remove(self)

            if group.screen and self.x < group.screen.x:
                self.x += group.screen.x
            group.add(self)
            if switch_group:
                group.cmd_toscreen(toggle=False)

    def paint_borders(self, color: Union[ColorType, List[ColorType]], width) -> None:
        """Set the border colour(s) and width used when rendering this window.

        A list of colours paints one ring per colour, truncated to at most
        ``width`` entries.
        """
        if color:
            if isinstance(color, list):
                if len(color) > width:
                    color = color[:width]
                self.bordercolor = [_rgb(c) for c in color]
            else:
                self.bordercolor = [_rgb(color)]
        self.borderwidth = width

    @property
    def floating(self):
        return self._float_state != FloatStates.NOT_FLOATING

    @floating.setter
    def floating(self, do_float):
        if do_float and self._float_state == FloatStates.NOT_FLOATING:
            if self.group and self.group.screen:
                screen = self.group.screen
                if not self._float_width:  # These might start as 0
                    self._float_width = self.width
                    self._float_height = self.height
                self._enablefloating(
                    screen.x + self.float_x,
                    screen.y + self.float_y,
                    self._float_width,
                    self._float_height
                )
            else:
                # if we are setting floating early, e.g. from a hook, we don't have a screen yet
                self._float_state = FloatStates.FLOATING
        elif (not do_float) and self._float_state != FloatStates.NOT_FLOATING:
            if self._float_state == FloatStates.FLOATING:
                # store last size
                self._float_width = self.width
                self._float_height = self.height
            self._float_state = FloatStates.NOT_FLOATING
            self.group.mark_floating(self, False)
            hook.fire('float_change')

    @property
    def fullscreen(self):
        return self._float_state == FloatStates.FULLSCREEN

    @fullscreen.setter
    def fullscreen(self, do_full):
        # Inform the client of the fullscreen state change first.
        self.surface.set_fullscreen(do_full)
        if do_full:
            screen = self.group.screen or \
                self.qtile.find_closest_screen(self.x, self.y)
            self._enablefloating(
                screen.x,
                screen.y,
                screen.width,
                screen.height,
                new_float_state=FloatStates.FULLSCREEN
            )
            return

        if self._float_state == FloatStates.FULLSCREEN:
            self.floating = False

    @property
    def maximized(self):
        return self._float_state == FloatStates.MAXIMIZED

    @maximized.setter
    def maximized(self, do_maximize):
        if do_maximize:
            screen = self.group.screen or \
                self.qtile.find_closest_screen(self.x, self.y)
            # Maximize within the screen's usable area (d* excludes gaps/bars).
            self._enablefloating(
                screen.dx,
                screen.dy,
                screen.dwidth,
                screen.dheight,
                new_float_state=FloatStates.MAXIMIZED
            )
        else:
            if self._float_state == FloatStates.MAXIMIZED:
                self.floating = False

    @property
    def minimized(self):
        return self._float_state == FloatStates.MINIMIZED

    @minimized.setter
    def minimized(self, do_minimize):
        if do_minimize:
            if self._float_state != FloatStates.MINIMIZED:
                self._enablefloating(new_float_state=FloatStates.MINIMIZED)
        else:
            if self._float_state == FloatStates.MINIMIZED:
                self.floating = False

    def focus(self, warp: bool) -> None:
        """Give this window keyboard focus, optionally warping the pointer."""
        self.core.focus_window(self)

        if isinstance(self, base.Internal):
            # self.core.focus_window is enough for internal windows
            return

        if warp and self.qtile.config.cursor_warp:
            self.core.warp_pointer(
                self.x + self.width // 2,
                self.y + self.height // 2,
            )

        hook.fire("client_focus", self)

    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Position and size the window, updating borders and damage."""
        # Adjust the placement to account for layout margins, if there are any.
        if margin is not None:
            if isinstance(margin, int):
                margin = [margin] * 4
            x += margin[3]
            y += margin[0]
            width -= margin[1] + margin[3]
            height -= margin[0] + margin[2]

        # Clamp the requested size to the client's size hints if asked.
        if respect_hints:
            state = self.surface.toplevel._ptr.current
            width = max(width, state.min_width)
            height = max(height, state.min_height)
            if state.max_width:
                width = min(width, state.max_width)
            if state.max_height:
                height = min(height, state.max_height)

        # save x and y float offset
        if self.group is not None and self.group.screen is not None:
            self.float_x = x - self.group.screen.x
            self.float_y = y - self.group.screen.y

        self.x = x
        self.y = y
        self.surface.set_size(int(width), int(height))
        self._width = int(width)
        self._height = int(height)
        self.paint_borders(bordercolor, borderwidth)

        if above and self._mapped:
            # Move to the top of the stacking order.
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()

        self._find_outputs()
        self.damage()

    def _tweak_float(self, x=None, y=None, dx=0, dy=0, w=None, h=None, dw=0, dh=0):
        """Apply absolute and/or relative adjustments to a floating window.

        If the window's centre moves onto a different screen, the window is
        migrated to that screen's group.
        """
        if x is None:
            x = self.x
        x += dx

        if y is None:
            y = self.y
        y += dy

        if w is None:
            w = self.width
        w += dw

        if h is None:
            h = self.height
        h += dh

        if h < 0:
            h = 0
        if w < 0:
            w = 0

        screen = self.qtile.find_closest_screen(
            self.x + self.width // 2, self.y + self.height // 2
        )
        if self.group and screen is not None and screen != self.group.screen:
            self.group.remove(self, force=True)
            screen.group.add(self, force=True)
            self.qtile.focus_screen(screen.index)

        self._reconfigure_floating(x, y, w, h)

    def _enablefloating(self, x=None, y=None, w=None, h=None,
                        new_float_state=FloatStates.FLOATING):
        # Thin wrapper kept for readability at call sites.
        self._reconfigure_floating(x, y, w, h, new_float_state)

    def _reconfigure_floating(self, x, y, w, h, new_float_state=FloatStates.FLOATING):
        """Place (or hide, for MINIMIZED) the window and record its float state."""
        if new_float_state == FloatStates.MINIMIZED:
            self.hide()
        else:
            self.place(
                x, y, w, h,
                self.borderwidth, self.bordercolor, above=True, respect_hints=True
            )
        if self._float_state != new_float_state:
            self._float_state = new_float_state
            if self.group:  # may be not, if it's called from hook
                self.group.mark_floating(self, True)
            hook.fire('float_change')

    def info(self) -> Dict:
        """Return a dictionary of info."""
        float_info = {
            "x": self.float_x,
            "y": self.float_y,
            "width": self._float_width,
            "height": self._float_height,
        }
        return dict(
            name=self.name,
            x=self.x,
            y=self.y,
            width=self.width,
            height=self.height,
            group=self.group.name if self.group else None,
            id=self.wid,
            float_info=float_info,
            floating=self._float_state != FloatStates.NOT_FLOATING,
            maximized=self._float_state == FloatStates.MAXIMIZED,
            minimized=self._float_state == FloatStates.MINIMIZED,
            fullscreen=self._float_state == FloatStates.FULLSCREEN
        )

    def _items(self, name: str) -> ItemT:
        # Command graph navigation: which child objects exist under this window.
        if name == "group":
            return True, []
        elif name == "layout":
            return True, list(range(len(self.group.layouts)))
        elif name == "screen" and self.group.screen is not None:
            return True, []
        return None

    def _select(self, name, sel):
        # Command graph navigation: resolve a selected child object.
        if name == "group":
            return self.group
        elif name == "layout":
            if sel is None:
                return self.group.layout
            else:
                return utils.lget(self.group.layouts, sel)
        elif name == "screen":
            return self.group.screen

    def cmd_focus(self, warp: bool = True) -> None:
        """Focuses the window."""
        self.focus(warp)

    def cmd_move_floating(self, dx: int, dy: int) -> None:
        """Move the floating window by (dx, dy)."""
        self._tweak_float(dx=dx, dy=dy)

    def cmd_resize_floating(self, dw: int, dh: int) -> None:
        """Grow the floating window by (dw, dh)."""
        self._tweak_float(dw=dw, dh=dh)

    def cmd_set_position_floating(self, x: int, y: int) -> None:
        """Move the floating window to absolute position (x, y)."""
        self._tweak_float(x=x, y=y)

    def cmd_set_size_floating(self, w: int, h: int) -> None:
        """Set the floating window's size to (w, h)."""
        self._tweak_float(w=w, h=h)

    def cmd_place(self, x, y, width, height, borderwidth, bordercolor,
                  above=False, margin=None):
        """Place the window with the given geometry and border settings."""
        self.place(x, y, width, height, borderwidth, bordercolor, above,
                   margin)

    def cmd_get_position(self) -> Tuple[int, int]:
        """Return the window's (x, y) position."""
        return self.x, self.y

    def cmd_get_size(self) -> Tuple[int, int]:
        """Return the window's (width, height)."""
        return self.width, self.height

    def cmd_toggle_floating(self) -> None:
        self.floating = not self.floating

    def cmd_enable_floating(self):
        self.floating = True

    def cmd_disable_floating(self):
        self.floating = False

    def cmd_toggle_maximize(self) -> None:
        self.maximized = not self.maximized

    def cmd_toggle_minimize(self) -> None:
        self.minimized = not self.minimized

    def cmd_toggle_fullscreen(self) -> None:
        self.fullscreen = not self.fullscreen

    def cmd_enable_fullscreen(self) -> None:
        self.fullscreen = True

    def cmd_disable_fullscreen(self) -> None:
        self.fullscreen = False

    def cmd_bring_to_front(self) -> None:
        """Raise the window to the top of the stacking order."""
        if self.mapped:
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()

    def cmd_kill(self) -> None:
        """Ask the client to close the window."""
        self.kill()
class Internal(base.Internal, Window):
    """
    Internal windows are simply textures controlled by the compositor.
    """
    # The texture holding this window's pixel content; recreated on resize.
    texture: Texture

    def __init__(
        self, core: Core, qtile: Qtile, x: int, y: int, width: int, height: int
    ):
        self.core = core
        self.qtile = qtile
        self._group: Optional[_Group] = None
        self._mapped: bool = False
        self._wid: int = self.core.new_wid()
        self.x: int = x
        self.y: int = y
        self.opacity: float = 1.0
        self._width: int = width
        self._height: int = height
        self._outputs: List[Output] = []
        self._find_outputs()
        self._reset_texture()

    def finalize(self):
        self.hide()

    def _reset_texture(self):
        """Recreate the backing texture at the current size, cleared to black."""
        clear = cairocffi.ImageSurface(cairocffi.FORMAT_ARGB32, self.width, self.height)
        with cairocffi.Context(clear) as context:
            context.set_source_rgba(*utils.rgb("#000000"))
            context.paint()
        self.texture = Texture.from_pixels(
            self.core.renderer,
            DRM_FORMAT_ARGB8888,
            cairocffi.ImageSurface.format_stride_for_width(cairocffi.FORMAT_ARGB32, self.width),
            self.width,
            self.height,
            cairocffi.cairo.cairo_image_surface_get_data(clear._pointer),
        )

    def create_drawer(self, width: int, height: int) -> Drawer:
        """Create a Drawer that draws to this window."""
        return Drawer(self.qtile, self, width, height)

    @property
    def width(self) -> int:
        return self._width

    @width.setter
    def width(self, value: int) -> None:
        self._width = value

    @property
    def height(self) -> int:
        return self._height

    @height.setter
    def height(self, value: int) -> None:
        self._height = value

    def hide(self) -> None:
        self.mapped = False
        self.damage()

    def unhide(self) -> None:
        self.mapped = True
        self.damage()

    def kill(self) -> None:
        """Hide the window and remove it from qtile's window registry."""
        self.hide()
        del self.qtile.windows_map[self.wid]

    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Position and size the window; the texture is rebuilt on resize."""
        if above and self._mapped:
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()

        self.x = x
        self.y = y
        # Only rebuild the texture when the size actually changed.
        needs_reset = width != self.width or height != self.height
        self.width = width
        self.height = height
        if needs_reset:
            self._reset_texture()
        self._find_outputs()
        self.damage()

    def info(self) -> Dict:
        """Return a dictionary of info."""
        return dict(
            x=self.x,
            y=self.y,
            width=self.width,
            height=self.height,
            id=self.wid,
        )
class Static(base.Static, Window):
    """
    Static windows represent both regular windows made static by the user and layer
    surfaces created as part of the wlr layer shell protocol.
    """

    def __init__(
        self,
        core: Core,
        qtile: Qtile,
        surface: SurfaceType,
        wid: int,
    ):
        base.Static.__init__(self)
        self.core = core
        self.qtile = qtile
        self._group: Optional[_Group] = None
        self.surface = surface
        self.subsurfaces: List[SubSurface] = []
        self._wid = wid
        self._mapped: bool = False
        self.x = 0
        self.y = 0
        self.borderwidth: int = 0
        self.bordercolor: List[ffi.CData] = [_rgb((0, 0, 0, 1))]
        self.opacity: float = 1.0
        self._outputs: List[Output] = []
        self._float_state = FloatStates.FLOATING
        self.defunct = True
        # True when this wraps a wlr-layer-shell surface rather than an
        # XdgSurface; several methods branch on this.
        self.is_layer = False
        self.screen = qtile.current_screen

        self.add_listener(surface.map_event, self._on_map)
        self.add_listener(surface.unmap_event, self._on_unmap)
        self.add_listener(surface.destroy_event, self._on_destroy)
        self.add_listener(surface.surface.commit_event, self._on_commit)

        if isinstance(surface, LayerSurfaceV1):
            self.is_layer = True
            if surface.output is None:
                # Layer surfaces without an output go to the output under the cursor.
                surface.output = core.output_layout.output_at(core.cursor.x, core.cursor.y)
            self.output = core.output_from_wlr_output(surface.output)
            self.screen = self.output.screen
            self.mapped = True
            self._outputs.append(self.output)
        else:
            self._find_outputs()

    @property
    def mapped(self) -> bool:
        # This is identical to the parent class' version but mypy has a bug that
        # triggers a false positive: https://github.com/python/mypy/issues/1465
        return self._mapped

    @mapped.setter
    def mapped(self, mapped: bool) -> None:
        if mapped == self._mapped:
            return
        self._mapped = mapped

        if isinstance(self.surface, LayerSurfaceV1):
            # Layer surfaces are tracked per-output in layer lists, not in
            # the core's mapped_windows stack.
            layer = self.output.layers[self.surface.client_pending.layer]
            if mapped:
                layer.append(self)
            else:
                layer.remove(self)

                if self.reserved_space:
                    self.qtile.free_reserved_space(self.reserved_space, self.screen)
            self.output.organise_layers()
        else:
            if mapped:
                self.core.mapped_windows.append(self)
            else:
                self.core.mapped_windows.remove(self)

        self.core.stack_windows()

    def _on_map(self, _listener, data):
        """Map the window; re-layout layer surfaces and give them focus."""
        logger.debug("Signal: window map")
        self.mapped = True
        if self.is_layer:
            self.output.organise_layers()
            self.core.focus_window(self, self.surface.surface)

    def _on_unmap(self, _listener, data):
        """Unmap the window; move focus back to the current group if needed."""
        logger.debug("Signal: window unmap")
        self.mapped = False
        if self.surface.surface == self.core.seat.keyboard_state.focused_surface:
            group = self.qtile.current_screen.group
            if group.current_window:
                group.focus(group.current_window, warp=self.qtile.config.cursor_warp)
            else:
                self.core.seat.keyboard_clear_focus()
        if self.is_layer:
            self.output.organise_layers()
        self.damage()

    def has_fixed_size(self) -> bool:
        # Static windows are always resizable from qtile's point of view.
        return False

    def kill(self):
        """Ask the client to close; layer surfaces use a different request."""
        if self.is_layer:
            self.surface.close()
        else:
            self.surface.send_close()

    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Position and size the surface, then repaint borders and damage."""
        self.x = x
        self.y = y
        if self.is_layer:
            self.surface.configure(width, height)
        else:
            self.surface.set_size(int(width), int(height))
            self.paint_borders(bordercolor, borderwidth)
        self.damage()

    def cmd_bring_to_front(self) -> None:
        """Raise the window; only applies to non-layer (XDG) surfaces."""
        if self.mapped and isinstance(self.surface, XdgSurface):
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()
WindowType = typing.Union[Window, Internal, Static]
class XdgPopupWindow(HasListeners):
    """
    This represents a single `struct wlr_xdg_popup` object and is owned by a single
    parent window (of `Union[WindowType, XdgPopupWindow]`). wlroots does most of the
    work for us, but we need to listen to certain events so that we know when to render
    frames and we need to unconstrain the popups so they are completely visible.
    """

    def __init__(self, parent: Union[WindowType, XdgPopupWindow], xdg_popup: XdgPopup):
        self.parent = parent
        self.xdg_popup = xdg_popup
        self.core: Core = parent.core
        self.popups: List[XdgPopupWindow] = []

        # Keep on output
        if isinstance(parent, XdgPopupWindow):
            # This is a nested XdgPopup
            self.output: Output = parent.output
            self.output_box: Box = parent.output_box
        else:
            # Parent is an XdgSurface; This is a first-level XdgPopup
            box = xdg_popup.base.get_geometry()
            lx, ly = self.core.output_layout.closest_point(parent.x + box.x, parent.y + box.y)
            wlr_output = self.core.output_layout.output_at(lx, ly)
            self.output = wlr_output.data
            box = Box(*self.output.get_geometry())
            # Express the output box in coordinates relative to the popup.
            box.x = round(box.x - lx)
            box.y = round(box.y - ly)
            self.output_box = box
        # Constrain the popup so it is fully visible on the output.
        xdg_popup.unconstrain_from_box(self.output_box)

        self.add_listener(xdg_popup.base.map_event, self._on_map)
        self.add_listener(xdg_popup.base.unmap_event, self._on_unmap)
        self.add_listener(xdg_popup.base.destroy_event, self._on_destroy)
        self.add_listener(xdg_popup.base.new_popup_event, self._on_new_popup)
        self.add_listener(xdg_popup.base.surface.commit_event, self._on_commit)

    def _on_map(self, _listener, _data):
        logger.debug("Signal: popup map")
        self.output.damage()

    def _on_unmap(self, _listener, _data):
        logger.debug("Signal: popup unmap")
        self.output.damage()

    def _on_destroy(self, _listener, _data):
        logger.debug("Signal: popup destroy")
        self.finalize_listeners()
        self.output.damage()

    def _on_new_popup(self, _listener, xdg_popup: XdgPopup):
        # Popups can themselves spawn nested popups.
        logger.debug("Signal: popup new_popup")
        self.popups.append(XdgPopupWindow(self, xdg_popup))

    def _on_commit(self, _listener, _data):
        self.output.damage()
class SubSurface(HasListeners):
    """
    This represents a single `struct wlr_subsurface` object and is owned by a single
    parent window (of `Union[WindowType, SubSurface]`). We only need to track them so
    that we can listen to their commit events and render accordingly.
    """

    def __init__(self, parent: Union[WindowType, SubSurface], subsurface: WlrSubSurface):
        self.parent = parent
        self.subsurfaces: List[SubSurface] = []

        self.add_listener(subsurface.destroy_event, self._on_destroy)
        # Forward commits straight to the parent so it schedules a repaint.
        self.add_listener(subsurface.surface.commit_event, parent._on_commit)
        self.add_listener(subsurface.surface.new_subsurface_event, self._on_new_subsurface)

    def finalize(self):
        """Detach listeners recursively and unregister from the parent."""
        self.finalize_listeners()
        for subsurface in self.subsurfaces:
            subsurface.finalize()
        self.parent.subsurfaces.remove(self)

    def _on_destroy(self, _listener, _data):
        self.finalize()

    def _on_commit(self, _listener, _data):
        # Bubble the commit up to the owning window.
        self.parent._on_commit(None, None)

    def _on_new_subsurface(self, _listener, subsurface: WlrSubSurface):
        # Subsurfaces can nest; track children the same way.
        self.subsurfaces.append(SubSurface(self, subsurface))
| 34.043573 | 97 | 0.615001 |
from __future__ import annotations
import functools
import typing
import cairocffi
import pywayland
from wlroots import ffi
from wlroots.util.edges import Edges
from wlroots.wlr_types import Box, Texture
from wlroots.wlr_types.layer_shell_v1 import LayerSurfaceV1
from wlroots.wlr_types.xdg_shell import (
XdgPopup,
XdgSurface,
XdgTopLevelSetFullscreenEvent,
)
from libqtile import hook, utils
from libqtile.backend import base
from libqtile.backend.base import FloatStates
from libqtile.backend.wayland.drawer import Drawer
from libqtile.backend.wayland.wlrq import DRM_FORMAT_ARGB8888, HasListeners
from libqtile.command.base import CommandError
from libqtile.log_utils import logger
if typing.TYPE_CHECKING:
from typing import Dict, List, Optional, Tuple, Union
from wlroots.wlr_types.surface import SubSurface as WlrSubSurface
from libqtile.backend.wayland.core import Core
from libqtile.backend.wayland.output import Output
from libqtile.core.manager import Qtile
from libqtile.group import _Group
from libqtile.utils import ColorType
EDGES_TILED = Edges.TOP | Edges.BOTTOM | Edges.LEFT | Edges.RIGHT
EDGES_FLOAT = Edges.NONE
@functools.lru_cache()
def _rgb(color: ColorType) -> ffi.CData:
    """Create and cache a ``float[4]`` C array for border painting."""
    if isinstance(color, ffi.CData):
        # Already a C array; return it as-is.
        return color
    return ffi.new("float[4]", utils.rgb(color))
SurfaceType = typing.Union[XdgSurface, LayerSurfaceV1]
class Window(base.Window, HasListeners):
def __init__(self, core: Core, qtile: Qtile, surface: SurfaceType, wid: int):
base.Window.__init__(self)
self.core = core
self.qtile = qtile
self.surface = surface
self._group: Optional[_Group] = None
self.popups: List[XdgPopupWindow] = []
self.subsurfaces: List[SubSurface] = []
self._wid = wid
self._mapped: bool = False
self.x = 0
self.y = 0
self.bordercolor: List[ffi.CData] = [_rgb((0, 0, 0, 1))]
self.opacity: float = 1.0
self._outputs: List[Output] = []
self._width: Optional[int] = None
self._height: Optional[int] = None
assert isinstance(surface, XdgSurface)
if surface.toplevel.title:
self.name = surface.toplevel.title
self._app_id: Optional[str] = surface.toplevel.app_id
surface.set_tiled(EDGES_TILED)
self._float_state = FloatStates.NOT_FLOATING
self.float_x: Optional[int] = None
self.float_y: Optional[int] = None
self._float_width: int = self.width
self._float_height: int = self.height
self.add_listener(surface.map_event, self._on_map)
self.add_listener(surface.unmap_event, self._on_unmap)
self.add_listener(surface.destroy_event, self._on_destroy)
self.add_listener(surface.new_popup_event, self._on_new_popup)
self.add_listener(surface.toplevel.request_fullscreen_event, self._on_request_fullscreen)
self.add_listener(surface.toplevel.set_title_event, self._on_set_title)
self.add_listener(surface.toplevel.set_app_id_event, self._on_set_app_id)
self.add_listener(surface.surface.commit_event, self._on_commit)
self.add_listener(surface.surface.new_subsurface_event, self._on_new_subsurface)
def finalize(self):
self.finalize_listeners()
for subsurface in self.subsurfaces:
subsurface.finalize()
@property
def wid(self):
return self._wid
@property
def width(self) -> int:
if self._width is None:
return self.surface.surface.current.width
return self._width
@width.setter
def width(self, width: int) -> None:
self._width = width
@property
def height(self) -> int:
if self._height is None:
return self.surface.surface.current.height
return self._height
@height.setter
def height(self, height: int) -> None:
self._height = height
@property
def group(self) -> Optional[_Group]:
return self._group
@group.setter
def group(self, group: Optional[_Group]) -> None:
self._group = group
@property
def mapped(self) -> bool:
return self._mapped
@mapped.setter
def mapped(self, mapped: bool) -> None:
if mapped == self._mapped:
return
self._mapped = mapped
if mapped:
self.core.mapped_windows.append(self)
else:
self.core.mapped_windows.remove(self)
self.core.stack_windows()
def _on_map(self, _listener, _data):
logger.debug("Signal: window map")
if self in self.core.pending_windows:
self.core.pending_windows.remove(self)
logger.debug(f"Managing new top-level window with window ID: {self.wid}")
self.qtile.manage(self)
if self.group.screen:
self.mapped = True
self.core.focus_window(self)
def _on_unmap(self, _listener, _data):
logger.debug("Signal: window unmap")
self.mapped = False
self.damage()
seat = self.core.seat
if not seat.destroyed:
if self.surface.surface == seat.keyboard_state.focused_surface:
seat.keyboard_clear_focus()
def _on_destroy(self, _listener, _data):
logger.debug("Signal: window destroy")
if self.mapped:
logger.warning("Window destroyed before unmap event.")
self.mapped = False
self.qtile.unmanage(self.wid)
self.finalize()
def _on_new_popup(self, _listener, xdg_popup: XdgPopup):
logger.debug("Signal: window new_popup")
self.popups.append(XdgPopupWindow(self, xdg_popup))
def _on_request_fullscreen(self, _listener, event: XdgTopLevelSetFullscreenEvent):
logger.debug("Signal: window request_fullscreen")
if self.qtile.config.auto_fullscreen:
self.fullscreen = event.fullscreen
def _on_set_title(self, _listener, _data):
logger.debug("Signal: window set_title")
self.name = self.surface.toplevel.title
hook.fire('client_name_updated', self)
def _on_set_app_id(self, _listener, _data):
logger.debug("Signal: window set_app_id")
self._app_id = self.surface.toplevel.app_id
def _on_commit(self, _listener, _data):
self.damage()
def _on_new_subsurface(self, _listener, subsurface: WlrSubSurface):
self.subsurfaces.append(SubSurface(self, subsurface))
def has_fixed_size(self) -> bool:
assert isinstance(self.surface, XdgSurface)
state = self.surface.toplevel._ptr.current
return (
0 < state.min_width == state.max_width and
0 < state.min_height == state.max_height
)
def is_transient_for(self) -> Optional[base.WindowType]:
assert isinstance(self.surface, XdgSurface)
parent = self.surface.toplevel.parent
if parent:
for win in self.qtile.windows_map.values():
if not isinstance(win, Internal) and win.surface == parent:
return win
return None
def _find_outputs(self):
self._outputs = [o for o in self.core.outputs if o.contains(self)]
def damage(self) -> None:
for output in self._outputs:
output.damage()
def hide(self):
if self.mapped:
self.surface.unmap_event.emit()
def unhide(self):
if not self.mapped:
self.surface.map_event.emit()
def kill(self):
self.surface.send_close()
def get_pid(self) -> int:
pid = pywayland.ffi.new("pid_t *")
pywayland.lib.wl_client_get_credentials(
self.surface._ptr.client.client, pid, ffi.NULL, ffi.NULL
)
return pid[0]
def get_wm_class(self) -> Optional[List]:
if self._app_id:
return [self._app_id]
return None
def togroup(self, group_name=None, *, switch_group=False):
if group_name is None:
group = self.qtile.current_group
else:
group = self.qtile.groups_map.get(group_name)
if group is None:
raise CommandError("No such group: %s" % group_name)
if self.group is not group:
self.hide()
if self.group:
if self.group.screen:
self.x -= self.group.screen.x
self.group.remove(self)
if group.screen and self.x < group.screen.x:
self.x += group.screen.x
group.add(self)
if switch_group:
group.cmd_toscreen(toggle=False)
def paint_borders(self, color: Union[ColorType, List[ColorType]], width) -> None:
if color:
if isinstance(color, list):
if len(color) > width:
color = color[:width]
self.bordercolor = [_rgb(c) for c in color]
else:
self.bordercolor = [_rgb(color)]
self.borderwidth = width
@property
def floating(self):
return self._float_state != FloatStates.NOT_FLOATING
@floating.setter
def floating(self, do_float):
if do_float and self._float_state == FloatStates.NOT_FLOATING:
if self.group and self.group.screen:
screen = self.group.screen
if not self._float_width:
self._float_width = self.width
self._float_height = self.height
self._enablefloating(
screen.x + self.float_x,
screen.y + self.float_y,
self._float_width,
self._float_height
)
else:
self._float_state = FloatStates.FLOATING
elif (not do_float) and self._float_state != FloatStates.NOT_FLOATING:
if self._float_state == FloatStates.FLOATING:
# store last size
self._float_width = self.width
self._float_height = self.height
self._float_state = FloatStates.NOT_FLOATING
self.group.mark_floating(self, False)
hook.fire('float_change')
@property
def fullscreen(self):
return self._float_state == FloatStates.FULLSCREEN
    @fullscreen.setter
    def fullscreen(self, do_full):
        """Toggle fullscreen: float over the whole screen or restore."""
        # Notify the client so it can adapt its rendering to the new state.
        self.surface.set_fullscreen(do_full)
        if do_full:
            screen = self.group.screen or \
                self.qtile.find_closest_screen(self.x, self.y)
            self._enablefloating(
                screen.x,
                screen.y,
                screen.width,
                screen.height,
                new_float_state=FloatStates.FULLSCREEN
            )
            return
        if self._float_state == FloatStates.FULLSCREEN:
            self.floating = False
    @property
    def maximized(self):
        """True when the window is in the maximized float state."""
        return self._float_state == FloatStates.MAXIMIZED
@maximized.setter
def maximized(self, do_maximize):
if do_maximize:
screen = self.group.screen or \
self.qtile.find_closest_screen(self.x, self.y)
self._enablefloating(
screen.dx,
screen.dy,
screen.dwidth,
screen.dheight,
new_float_state=FloatStates.MAXIMIZED
)
else:
if self._float_state == FloatStates.MAXIMIZED:
self.floating = False
    @property
    def minimized(self):
        """True when the window is in the minimized float state."""
        return self._float_state == FloatStates.MINIMIZED
@minimized.setter
def minimized(self, do_minimize):
if do_minimize:
if self._float_state != FloatStates.MINIMIZED:
self._enablefloating(new_float_state=FloatStates.MINIMIZED)
else:
if self._float_state == FloatStates.MINIMIZED:
self.floating = False
    def focus(self, warp: bool) -> None:
        """Give keyboard focus to this window; optionally warp the pointer to it."""
        self.core.focus_window(self)
        if isinstance(self, base.Internal):
            # self.core.focus_window is enough for internal windows
            return
        if warp and self.qtile.config.cursor_warp:
            # Warp the pointer to the window centre.
            self.core.warp_pointer(
                self.x + self.width // 2,
                self.y + self.height // 2,
            )
        hook.fire("client_focus", self)
    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Set the window's geometry, honouring margins and client size hints."""
        # Adjust the placement to account for layout margins, if there are any.
        if margin is not None:
            if isinstance(margin, int):
                margin = [margin] * 4
            x += margin[3]
            y += margin[0]
            width -= margin[1] + margin[3]
            height -= margin[0] + margin[2]
        if respect_hints:
            # Clamp to the client's min/max hints; a max of 0 means "no max".
            state = self.surface.toplevel._ptr.current
            width = max(width, state.min_width)
            height = max(height, state.min_height)
            if state.max_width:
                width = min(width, state.max_width)
            if state.max_height:
                height = min(height, state.max_height)
        # save x and y float offset
        if self.group is not None and self.group.screen is not None:
            self.float_x = x - self.group.screen.x
            self.float_y = y - self.group.screen.y
        self.x = x
        self.y = y
        self.surface.set_size(int(width), int(height))
        self._width = int(width)
        self._height = int(height)
        self.paint_borders(bordercolor, borderwidth)
        if above and self._mapped:
            # Raise to the top of the stacking order.
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()
        self._find_outputs()
        self.damage()
def _tweak_float(self, x=None, y=None, dx=0, dy=0, w=None, h=None, dw=0, dh=0):
if x is None:
x = self.x
x += dx
if y is None:
y = self.y
y += dy
if w is None:
w = self.width
w += dw
if h is None:
h = self.height
h += dh
if h < 0:
h = 0
if w < 0:
w = 0
screen = self.qtile.find_closest_screen(
self.x + self.width // 2, self.y + self.height // 2
)
if self.group and screen is not None and screen != self.group.screen:
self.group.remove(self, force=True)
screen.group.add(self, force=True)
self.qtile.focus_screen(screen.index)
self._reconfigure_floating(x, y, w, h)
    def _enablefloating(self, x=None, y=None, w=None, h=None,
                        new_float_state=FloatStates.FLOATING):
        """Put the window into the given floating state with the given geometry."""
        self._reconfigure_floating(x, y, w, h, new_float_state)
    def _reconfigure_floating(self, x, y, w, h, new_float_state=FloatStates.FLOATING):
        """Apply floating geometry/state, firing float_change on a state change."""
        if new_float_state == FloatStates.MINIMIZED:
            self.hide()
        else:
            self.place(
                x, y, w, h,
                self.borderwidth, self.bordercolor, above=True, respect_hints=True
            )
        if self._float_state != new_float_state:
            self._float_state = new_float_state
            if self.group:  # may be not, if it's called from hook
                self.group.mark_floating(self, True)
            hook.fire('float_change')
def info(self) -> Dict:
float_info = {
"x": self.float_x,
"y": self.float_y,
"width": self._float_width,
"height": self._float_height,
}
return dict(
name=self.name,
x=self.x,
y=self.y,
width=self.width,
height=self.height,
group=self.group.name if self.group else None,
id=self.wid,
float_info=float_info,
floating=self._float_state != FloatStates.NOT_FLOATING,
maximized=self._float_state == FloatStates.MAXIMIZED,
minimized=self._float_state == FloatStates.MINIMIZED,
fullscreen=self._float_state == FloatStates.FULLSCREEN
)
def _items(self, name: str) -> ItemT:
if name == "group":
return True, []
elif name == "layout":
return True, list(range(len(self.group.layouts)))
elif name == "screen" and self.group.screen is not None:
return True, []
return None
def _select(self, name, sel):
if name == "group":
return self.group
elif name == "layout":
if sel is None:
return self.group.layout
else:
return utils.lget(self.group.layouts, sel)
elif name == "screen":
return self.group.screen
    def cmd_focus(self, warp: bool = True) -> None:
        """Focus this window."""
        self.focus(warp)
    def cmd_move_floating(self, dx: int, dy: int) -> None:
        """Move the floating window by (dx, dy) pixels."""
        self._tweak_float(dx=dx, dy=dy)
    def cmd_resize_floating(self, dw: int, dh: int) -> None:
        """Grow/shrink the floating window by (dw, dh) pixels."""
        self._tweak_float(dw=dw, dh=dh)
    def cmd_set_position_floating(self, x: int, y: int) -> None:
        """Move the floating window to the absolute position (x, y)."""
        self._tweak_float(x=x, y=y)
    def cmd_set_size_floating(self, w: int, h: int) -> None:
        """Set the floating window's size to (w, h)."""
        self._tweak_float(w=w, h=h)
    def cmd_place(self, x, y, width, height, borderwidth, bordercolor,
                  above=False, margin=None):
        """Place the window with the given geometry and border."""
        self.place(x, y, width, height, borderwidth, bordercolor, above,
                   margin)
    def cmd_get_position(self) -> Tuple[int, int]:
        """Return the window's (x, y) position."""
        return self.x, self.y
    def cmd_get_size(self) -> Tuple[int, int]:
        """Return the window's (width, height)."""
        return self.width, self.height
    def cmd_toggle_floating(self) -> None:
        """Toggle the floating state."""
        self.floating = not self.floating
    def cmd_enable_floating(self):
        """Float the window."""
        self.floating = True
    def cmd_disable_floating(self):
        """Tile the window."""
        self.floating = False
    def cmd_toggle_maximize(self) -> None:
        """Toggle the maximized state."""
        self.maximized = not self.maximized
    def cmd_toggle_minimize(self) -> None:
        """Toggle the minimized state."""
        self.minimized = not self.minimized
    def cmd_toggle_fullscreen(self) -> None:
        """Toggle the fullscreen state."""
        self.fullscreen = not self.fullscreen
    def cmd_enable_fullscreen(self) -> None:
        """Make the window fullscreen."""
        self.fullscreen = True
    def cmd_disable_fullscreen(self) -> None:
        """Leave fullscreen."""
        self.fullscreen = False
    def cmd_bring_to_front(self) -> None:
        """Raise the window to the top of the stacking order."""
        if self.mapped:
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()
    def cmd_kill(self) -> None:
        """Ask the client to close this window."""
        self.kill()
class Internal(base.Internal, Window):
    """A window rendered by qtile itself (e.g. a bar), backed by a texture."""
    texture: Texture
    def __init__(
        self, core: Core, qtile: Qtile, x: int, y: int, width: int, height: int
    ):
        self.core = core
        self.qtile = qtile
        self._group: Optional[_Group] = None
        self._mapped: bool = False
        self._wid: int = self.core.new_wid()
        self.x: int = x
        self.y: int = y
        self.opacity: float = 1.0
        self._width: int = width
        self._height: int = height
        self._outputs: List[Output] = []
        self._find_outputs()
        self._reset_texture()
    def finalize(self):
        """Clean up when the window is destroyed."""
        self.hide()
    def _reset_texture(self):
        # (Re)create the backing texture at the current size, cleared to
        # opaque black, so drawers render onto a fresh surface.
        clear = cairocffi.ImageSurface(cairocffi.FORMAT_ARGB32, self.width, self.height)
        with cairocffi.Context(clear) as context:
            context.set_source_rgba(*utils.rgb("#000000"))
            context.paint()
        self.texture = Texture.from_pixels(
            self.core.renderer,
            DRM_FORMAT_ARGB8888,
            cairocffi.ImageSurface.format_stride_for_width(cairocffi.FORMAT_ARGB32, self.width),
            self.width,
            self.height,
            cairocffi.cairo.cairo_image_surface_get_data(clear._pointer),
        )
    def create_drawer(self, width: int, height: int) -> Drawer:
        """Create a Drawer used by widgets to paint onto this window."""
        return Drawer(self.qtile, self, width, height)
    @property
    def width(self) -> int:
        return self._width
    @width.setter
    def width(self, value: int) -> None:
        self._width = value
    @property
    def height(self) -> int:
        return self._height
    @height.setter
    def height(self, value: int) -> None:
        self._height = value
    def hide(self) -> None:
        self.mapped = False
        self.damage()
    def unhide(self) -> None:
        self.mapped = True
        self.damage()
    def kill(self) -> None:
        self.hide()
        del self.qtile.windows_map[self.wid]
    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Position/size the window; the texture is recreated on resize."""
        if above and self._mapped:
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()
        self.x = x
        self.y = y
        needs_reset = width != self.width or height != self.height
        self.width = width
        self.height = height
        if needs_reset:
            self._reset_texture()
        self._find_outputs()
        self.damage()
    def info(self) -> Dict:
        """Return a dictionary describing this internal window."""
        return dict(
            x=self.x,
            y=self.y,
            width=self.width,
            height=self.height,
            id=self.wid,
        )
class Static(base.Static, Window):
    """A window that no layout manages, e.g. layer-shell surfaces (bars,
    notifications) or clients explicitly made static."""
    def __init__(
        self,
        core: Core,
        qtile: Qtile,
        surface: SurfaceType,
        wid: int,
    ):
        base.Static.__init__(self)
        self.core = core
        self.qtile = qtile
        self._group: Optional[_Group] = None
        self.surface = surface
        self.subsurfaces: List[SubSurface] = []
        self._wid = wid
        self._mapped: bool = False
        self.x = 0
        self.y = 0
        self.borderwidth: int = 0
        self.bordercolor: List[ffi.CData] = [_rgb((0, 0, 0, 1))]
        self.opacity: float = 1.0
        self._outputs: List[Output] = []
        self._float_state = FloatStates.FLOATING
        self.defunct = True
        self.is_layer = False
        self.screen = qtile.current_screen
        self.add_listener(surface.map_event, self._on_map)
        self.add_listener(surface.unmap_event, self._on_unmap)
        self.add_listener(surface.destroy_event, self._on_destroy)
        self.add_listener(surface.surface.commit_event, self._on_commit)
        if isinstance(surface, LayerSurfaceV1):
            # Layer-shell surfaces are tied to a specific output; fall back
            # to the output under the cursor when the client named none.
            self.is_layer = True
            if surface.output is None:
                surface.output = core.output_layout.output_at(core.cursor.x, core.cursor.y)
            self.output = core.output_from_wlr_output(surface.output)
            self.screen = self.output.screen
            self.mapped = True
            self._outputs.append(self.output)
        else:
            self._find_outputs()
    @property
    def mapped(self) -> bool:
        # triggers a false positive: https://github.com/python/mypy/issues/1465
        return self._mapped
    @mapped.setter
    def mapped(self, mapped: bool) -> None:
        if mapped == self._mapped:
            return
        self._mapped = mapped
        if isinstance(self.surface, LayerSurfaceV1):
            # Layer surfaces live in per-output layer lists rather than the
            # regular stacking order.
            layer = self.output.layers[self.surface.client_pending.layer]
            if mapped:
                layer.append(self)
            else:
                layer.remove(self)
                if self.reserved_space:
                    self.qtile.free_reserved_space(self.reserved_space, self.screen)
            self.output.organise_layers()
        else:
            if mapped:
                self.core.mapped_windows.append(self)
            else:
                self.core.mapped_windows.remove(self)
            self.core.stack_windows()
    def _on_map(self, _listener, data):
        logger.debug("Signal: window map")
        self.mapped = True
        if self.is_layer:
            self.output.organise_layers()
            self.core.focus_window(self, self.surface.surface)
    def _on_unmap(self, _listener, data):
        logger.debug("Signal: window unmap")
        self.mapped = False
        if self.surface.surface == self.core.seat.keyboard_state.focused_surface:
            # This surface had keyboard focus; hand focus back to the
            # current group's window, if there is one.
            group = self.qtile.current_screen.group
            if group.current_window:
                group.focus(group.current_window, warp=self.qtile.config.cursor_warp)
            else:
                self.core.seat.keyboard_clear_focus()
        if self.is_layer:
            self.output.organise_layers()
        self.damage()
    def has_fixed_size(self) -> bool:
        """Static windows never report a fixed size."""
        return False
    def kill(self):
        """Ask the client to close (layer surfaces use a different request)."""
        if self.is_layer:
            self.surface.close()
        else:
            self.surface.send_close()
    def place(self, x, y, width, height, borderwidth, bordercolor,
              above=False, margin=None, respect_hints=False):
        """Position and size the static window."""
        self.x = x
        self.y = y
        if self.is_layer:
            self.surface.configure(width, height)
        else:
            self.surface.set_size(int(width), int(height))
        self.paint_borders(bordercolor, borderwidth)
        self.damage()
    def cmd_bring_to_front(self) -> None:
        """Raise the window to the top of the stacking order."""
        if self.mapped and isinstance(self.surface, XdgSurface):
            self.core.mapped_windows.remove(self)
            self.core.mapped_windows.append(self)
            self.core.stack_windows()
# Union of all concrete window kinds handled by this backend.
WindowType = typing.Union[Window, Internal, Static]
class XdgPopupWindow(HasListeners):
    """Tracks an xdg_popup (possibly nested) and keeps it on its output."""
    def __init__(self, parent: Union[WindowType, XdgPopupWindow], xdg_popup: XdgPopup):
        self.parent = parent
        self.xdg_popup = xdg_popup
        self.core: Core = parent.core
        self.popups: List[XdgPopupWindow] = []
        # Keep on output
        if isinstance(parent, XdgPopupWindow):
            # This is a nested XdgPopup
            self.output: Output = parent.output
            self.output_box: Box = parent.output_box
        else:
            # Parent is an XdgSurface; This is a first-level XdgPopup
            box = xdg_popup.base.get_geometry()
            lx, ly = self.core.output_layout.closest_point(parent.x + box.x, parent.y + box.y)
            wlr_output = self.core.output_layout.output_at(lx, ly)
            self.output = wlr_output.data
            box = Box(*self.output.get_geometry())
            box.x = round(box.x - lx)
            box.y = round(box.y - ly)
            self.output_box = box
        # Constrain the popup to stay within the output's box.
        xdg_popup.unconstrain_from_box(self.output_box)
        self.add_listener(xdg_popup.base.map_event, self._on_map)
        self.add_listener(xdg_popup.base.unmap_event, self._on_unmap)
        self.add_listener(xdg_popup.base.destroy_event, self._on_destroy)
        self.add_listener(xdg_popup.base.new_popup_event, self._on_new_popup)
        self.add_listener(xdg_popup.base.surface.commit_event, self._on_commit)
    def _on_map(self, _listener, _data):
        logger.debug("Signal: popup map")
        self.output.damage()
    def _on_unmap(self, _listener, _data):
        logger.debug("Signal: popup unmap")
        self.output.damage()
    def _on_destroy(self, _listener, _data):
        logger.debug("Signal: popup destroy")
        self.finalize_listeners()
        self.output.damage()
    def _on_new_popup(self, _listener, xdg_popup: XdgPopup):
        logger.debug("Signal: popup new_popup")
        self.popups.append(XdgPopupWindow(self, xdg_popup))
    def _on_commit(self, _listener, _data):
        self.output.damage()
class SubSurface(HasListeners):
    """Tracks a client wl_subsurface so its commits damage the parent window."""
    def __init__(self, parent: Union[WindowType, SubSurface], subsurface: WlrSubSurface):
        self.parent = parent
        self.subsurfaces: List[SubSurface] = []
        self.add_listener(subsurface.destroy_event, self._on_destroy)
        self.add_listener(subsurface.surface.commit_event, parent._on_commit)
        self.add_listener(subsurface.surface.new_subsurface_event, self._on_new_subsurface)
    def finalize(self):
        """Detach listeners, finalize children and unregister from the parent."""
        self.finalize_listeners()
        for subsurface in self.subsurfaces:
            subsurface.finalize()
        self.parent.subsurfaces.remove(self)
    def _on_destroy(self, _listener, _data):
        self.finalize()
    def _on_commit(self, _listener, _data):
        # Forward the commit up the chain so the root window can damage.
        self.parent._on_commit(None, None)
    def _on_new_subsurface(self, _listener, subsurface: WlrSubSurface):
        self.subsurfaces.append(SubSurface(self, subsurface))
| true | true |
1c3857f5ef43f46f8e2416b7d4d3c2c0e9b2c929 | 3,596 | py | Python | ec2instanceconnectcli/mops.py | Hallian/aws-ec2-instance-connect-cli | 95be70889d8503964071dac227507c090a1bc980 | [
"Apache-2.0"
] | null | null | null | ec2instanceconnectcli/mops.py | Hallian/aws-ec2-instance-connect-cli | 95be70889d8503964071dac227507c090a1bc980 | [
"Apache-2.0"
] | null | null | null | ec2instanceconnectcli/mops.py | Hallian/aws-ec2-instance-connect-cli | 95be70889d8503964071dac227507c090a1bc980 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import sys
import argparse
from ec2instanceconnectcli.EC2InstanceConnectCLI import EC2InstanceConnectCLI
from ec2instanceconnectcli.EC2InstanceConnectKey import EC2InstanceConnectKey
from ec2instanceconnectcli.EC2InstanceConnectCommand import EC2InstanceConnectCommand
from ec2instanceconnectcli.EC2InstanceConnectLogger import EC2InstanceConnectLogger
from ec2instanceconnectcli import input_parser
DEFAULT_INSTANCE = ''
DEFAULT_PROFILE = None
def main(program, mode):
    """Parse command line arguments and run the SSH/SFTP connection.

    :param program: Client program to be used for SSH/SFTP operations.
    :type program: basestring
    :param mode: Identifies either SSH/SFTP operation.
    :type mode: basestring
    """
    usage = ""
    if mode == "ssh":
        usage="""
        mssh [-t instance_id] [-u profile] [-z availability_zone] [-r region] [supported ssh flags] target [command]
        target            => [user@]instance_id | [user@]hostname
        [supported ssh flags] => [-l login_name] [-p port]
        """
    elif mode == "sftp":
        usage="""
        msftp [-u aws_profile] [-z availability_zone] [supported sftp flags] target
        target            => [user@]instance_id[:file ...][:dir[/]] | [user@]hostname[:file ...][:dir[/]]
        [supported sftp flags] => [-P port] [-b batchfile]
        """
    parser = argparse.ArgumentParser(usage=usage)
    # All string-valued options share the same shape; declare them in one table.
    string_options = [
        (('-r', '--region'), 'AWS region', None),
        (('-z', '--zone'), 'Availability zone', None),
        (('-u', '--profile'), 'AWS Config Profile', DEFAULT_PROFILE),
        (('-t', '--instance_id'), 'EC2 Instance ID. Required if target is hostname', DEFAULT_INSTANCE),
        (('-J', '--jumphost'), 'EC2 Jump host Instance ID.', DEFAULT_INSTANCE),
    ]
    for option_flags, option_help, option_default in string_options:
        parser.add_argument(*option_flags, action='store', help=option_help,
                            type=str, default=option_default, metavar='')
    parser.add_argument('-d', '--debug', action="store_true", help='Turn on debug logging')
    known_args = parser.parse_known_args()
    logger = EC2InstanceConnectLogger(known_args[0].debug)
    try:
        instance_bundles, flags, program_command = input_parser.parseargs(known_args, mode)
    except Exception as e:
        print(str(e))
        parser.print_help()
        sys.exit(1)
    # Generate a temporary key pair for this session.
    cli_key = EC2InstanceConnectKey(logger.get_logger())
    cli_command = EC2InstanceConnectCommand(program, instance_bundles, cli_key.get_priv_key_file(), flags, program_command, logger.get_logger())
    try:
        # TODO: Handling for if the '-i' flag is passed
        cli = EC2InstanceConnectCLI(instance_bundles, cli_key.get_pub_key(), cli_command, logger.get_logger())
        cli.invoke_command()
    except Exception as e:
        print('Failed with:\n' + str(e))
        sys.exit(1)
| 44.395062 | 166 | 0.689655 |
import sys
import argparse
from ec2instanceconnectcli.EC2InstanceConnectCLI import EC2InstanceConnectCLI
from ec2instanceconnectcli.EC2InstanceConnectKey import EC2InstanceConnectKey
from ec2instanceconnectcli.EC2InstanceConnectCommand import EC2InstanceConnectCommand
from ec2instanceconnectcli.EC2InstanceConnectLogger import EC2InstanceConnectLogger
from ec2instanceconnectcli import input_parser
DEFAULT_INSTANCE = ''
DEFAULT_PROFILE = None
def main(program, mode):
    """Parse command line arguments and run the SSH/SFTP connection.

    :param program: Client program to be used for SSH/SFTP operations.
    :param mode: Identifies either SSH/SFTP operation ("ssh" or "sftp").
    """
    usage = ""
    if mode == "ssh":
        usage="""
        mssh [-t instance_id] [-u profile] [-z availability_zone] [-r region] [supported ssh flags] target [command]
        target            => [user@]instance_id | [user@]hostname
        [supported ssh flags] => [-l login_name] [-p port]
        """
    elif mode == "sftp":
        usage="""
        msftp [-u aws_profile] [-z availability_zone] [supported sftp flags] target
        target            => [user@]instance_id[:file ...][:dir[/]] | [user@]hostname[:file ...][:dir[/]]
        [supported sftp flags] => [-P port] [-b batchfile]
        """
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument('-r', '--region', action='store', help='AWS region', type=str, metavar='')
    parser.add_argument('-z', '--zone', action='store', help='Availability zone', type=str, metavar='')
    parser.add_argument('-u', '--profile', action='store', help='AWS Config Profile', type=str, default=DEFAULT_PROFILE, metavar='')
    parser.add_argument('-t', '--instance_id', action='store', help='EC2 Instance ID. Required if target is hostname', type=str, default=DEFAULT_INSTANCE, metavar='')
    parser.add_argument('-J', '--jumphost', action='store', help='EC2 Jump host Instance ID.', type=str, default=DEFAULT_INSTANCE, metavar='')
    parser.add_argument('-d', '--debug', action="store_true", help='Turn on debug logging')
    args = parser.parse_known_args()
    logger = EC2InstanceConnectLogger(args[0].debug)
    try:
        instance_bundles, flags, program_command = input_parser.parseargs(args, mode)
    except Exception as e:
        print(str(e))
        parser.print_help()
        sys.exit(1)
    # Generate a temporary key pair for this session.
    cli_key = EC2InstanceConnectKey(logger.get_logger())
    cli_command = EC2InstanceConnectCommand(program, instance_bundles, cli_key.get_priv_key_file(), flags, program_command, logger.get_logger())
    try:
        cli = EC2InstanceConnectCLI(instance_bundles, cli_key.get_pub_key(), cli_command, logger.get_logger())
        cli.invoke_command()
    except Exception as e:
        print('Failed with:\n' + str(e))
        sys.exit(1)
| true | true |
1c38582751f28bdc2dffc5c4099f3b071797762e | 3,755 | py | Python | torch_geometric/datasets/flickr.py | beneisner/pytorch_geometric | 53d44a96bd2de2753b1ab1d7153c026c92606a81 | [
"MIT"
] | 1 | 2022-02-17T09:40:17.000Z | 2022-02-17T09:40:17.000Z | torch_geometric/datasets/flickr.py | beneisner/pytorch_geometric | 53d44a96bd2de2753b1ab1d7153c026c92606a81 | [
"MIT"
] | null | null | null | torch_geometric/datasets/flickr.py | beneisner/pytorch_geometric | 53d44a96bd2de2753b1ab1d7153c026c92606a81 | [
"MIT"
] | 1 | 2021-06-16T11:40:43.000Z | 2021-06-16T11:40:43.000Z | from typing import Optional, Callable, List
import json
import os.path as osp
import torch
import numpy as np
import scipy.sparse as sp
from google_drive_downloader import GoogleDriveDownloader as gdd
from torch_geometric.data import InMemoryDataset, Data
class Flickr(InMemoryDataset):
    r"""The Flickr dataset from the `"GraphSAINT: Graph Sampling Based
    Inductive Learning Method" <https://arxiv.org/abs/1907.04931>`_ paper,
    containing descriptions and common properties of images.

    Args:
        root (string): Root directory where the dataset should be saved.
        transform (callable, optional): A function/transform that takes in an
            :obj:`torch_geometric.data.Data` object and returns a transformed
            version. The data object will be transformed before every access.
            (default: :obj:`None`)
        pre_transform (callable, optional): A function/transform that takes in
            an :obj:`torch_geometric.data.Data` object and returns a
            transformed version. The data object will be transformed before
            being saved to disk. (default: :obj:`None`)
    """
    # Google Drive ids of the four raw files.
    adj_full_id = '1crmsTbd1-2sEXsGwa2IKnIB7Zd3TmUsy'
    feats_id = '1join-XdvX3anJU_MLVtick7MgeAQiWIZ'
    class_map_id = '1uxIkbtg5drHTsKt-PAsZZ4_yJmgFmle9'
    role_id = '1htXCtuktuCW8TR8KiKfrFDAxUgekQoV7'
    def __init__(self, root: str, transform: Optional[Callable] = None,
                 pre_transform: Optional[Callable] = None):
        super().__init__(root, transform, pre_transform)
        self.data, self.slices = torch.load(self.processed_paths[0])
    @property
    def raw_file_names(self) -> List[str]:
        return ['adj_full.npz', 'feats.npy', 'class_map.json', 'role.json']
    @property
    def processed_file_names(self) -> str:
        return 'data.pt'
    def download(self):
        """Fetch the four raw files from Google Drive into ``raw_dir``."""
        gdrive_ids = {
            'adj_full.npz': self.adj_full_id,
            'feats.npy': self.feats_id,
            'class_map.json': self.class_map_id,
            'role.json': self.role_id,
        }
        for filename, gdrive_id in gdrive_ids.items():
            gdd.download_file_from_google_drive(
                gdrive_id, osp.join(self.raw_dir, filename))
    def process(self):
        """Assemble the graph ``Data`` object from the raw files and cache it."""
        # Sparse CSR adjacency -> COO -> (2, num_edges) edge index.
        loader = np.load(osp.join(self.raw_dir, 'adj_full.npz'))
        adj = sp.csr_matrix(
            (loader['data'], loader['indices'], loader['indptr']), loader['shape']
        ).tocoo()
        edge_index = torch.stack([
            torch.from_numpy(adj.row).to(torch.long),
            torch.from_numpy(adj.col).to(torch.long),
        ], dim=0)
        x = torch.from_numpy(np.load(osp.join(self.raw_dir, 'feats.npy'))).to(torch.float)
        num_nodes = x.size(0)
        # Node labels; -1 marks nodes missing from the class map.
        labels = [-1] * num_nodes
        with open(osp.join(self.raw_dir, 'class_map.json')) as fp:
            for node, label in json.load(fp).items():
                labels[int(node)] = label
        y = torch.tensor(labels)
        with open(osp.join(self.raw_dir, 'role.json')) as fp:
            role = json.load(fp)
        def role_mask(key):
            # Boolean mask with True at the node indices listed under ``key``.
            mask = torch.zeros(num_nodes, dtype=torch.bool)
            mask[torch.tensor(role[key])] = True
            return mask
        data = Data(x=x, edge_index=edge_index, y=y, train_mask=role_mask('tr'),
                    val_mask=role_mask('va'), test_mask=role_mask('te'))
        data = data if self.pre_transform is None else self.pre_transform(data)
        torch.save(self.collate([data]), self.processed_paths[0])
| 38.71134 | 79 | 0.656724 | from typing import Optional, Callable, List
import json
import os.path as osp
import torch
import numpy as np
import scipy.sparse as sp
from google_drive_downloader import GoogleDriveDownloader as gdd
from torch_geometric.data import InMemoryDataset, Data
class Flickr(InMemoryDataset):
    """The Flickr dataset (GraphSAINT paper), describing common properties of
    images. Raw files are downloaded from Google Drive and processed into a
    single graph ``Data`` object."""
    # Google Drive ids of the four raw files.
    adj_full_id = '1crmsTbd1-2sEXsGwa2IKnIB7Zd3TmUsy'
    feats_id = '1join-XdvX3anJU_MLVtick7MgeAQiWIZ'
    class_map_id = '1uxIkbtg5drHTsKt-PAsZZ4_yJmgFmle9'
    role_id = '1htXCtuktuCW8TR8KiKfrFDAxUgekQoV7'
    def __init__(self, root: str, transform: Optional[Callable] = None,
                 pre_transform: Optional[Callable] = None):
        super().__init__(root, transform, pre_transform)
        self.data, self.slices = torch.load(self.processed_paths[0])
    @property
    def raw_file_names(self) -> List[str]:
        """Files that must exist in ``raw_dir`` to skip downloading."""
        return ['adj_full.npz', 'feats.npy', 'class_map.json', 'role.json']
    @property
    def processed_file_names(self) -> str:
        """File that must exist in ``processed_dir`` to skip processing."""
        return 'data.pt'
    def download(self):
        """Fetch the four raw files from Google Drive into ``raw_dir``."""
        path = osp.join(self.raw_dir, 'adj_full.npz')
        gdd.download_file_from_google_drive(self.adj_full_id, path)
        path = osp.join(self.raw_dir, 'feats.npy')
        gdd.download_file_from_google_drive(self.feats_id, path)
        path = osp.join(self.raw_dir, 'class_map.json')
        gdd.download_file_from_google_drive(self.class_map_id, path)
        path = osp.join(self.raw_dir, 'role.json')
        gdd.download_file_from_google_drive(self.role_id, path)
    def process(self):
        """Assemble the graph ``Data`` object from the raw files and cache it."""
        # Sparse CSR adjacency -> COO -> (2, num_edges) edge index.
        f = np.load(osp.join(self.raw_dir, 'adj_full.npz'))
        adj = sp.csr_matrix((f['data'], f['indices'], f['indptr']), f['shape'])
        adj = adj.tocoo()
        row = torch.from_numpy(adj.row).to(torch.long)
        col = torch.from_numpy(adj.col).to(torch.long)
        edge_index = torch.stack([row, col], dim=0)
        x = np.load(osp.join(self.raw_dir, 'feats.npy'))
        x = torch.from_numpy(x).to(torch.float)
        # Node labels; -1 marks nodes missing from the class map.
        ys = [-1] * x.size(0)
        with open(osp.join(self.raw_dir, 'class_map.json')) as f:
            class_map = json.load(f)
            for key, item in class_map.items():
                ys[int(key)] = item
        y = torch.tensor(ys)
        # Train/validation/test boolean masks from the role file.
        with open(osp.join(self.raw_dir, 'role.json')) as f:
            role = json.load(f)
        train_mask = torch.zeros(x.size(0), dtype=torch.bool)
        train_mask[torch.tensor(role['tr'])] = True
        val_mask = torch.zeros(x.size(0), dtype=torch.bool)
        val_mask[torch.tensor(role['va'])] = True
        test_mask = torch.zeros(x.size(0), dtype=torch.bool)
        test_mask[torch.tensor(role['te'])] = True
        data = Data(x=x, edge_index=edge_index, y=y, train_mask=train_mask,
                    val_mask=val_mask, test_mask=test_mask)
        data = data if self.pre_transform is None else self.pre_transform(data)
        torch.save(self.collate([data]), self.processed_paths[0])
| true | true |
1c3858d364bdb885edc680b7942cf284a92dde06 | 192 | py | Python | filer_addons/filer_gui/admin/__init__.py | benzkji/django-filer-addons | 0220b1c75520dcda9ec1fe6a46e62a66735c7699 | [
"MIT"
] | null | null | null | filer_addons/filer_gui/admin/__init__.py | benzkji/django-filer-addons | 0220b1c75520dcda9ec1fe6a46e62a66735c7699 | [
"MIT"
] | 23 | 2017-06-17T07:11:07.000Z | 2020-11-06T17:05:05.000Z | filer_addons/filer_gui/admin/__init__.py | benzkji/django-filer-addons | 0220b1c75520dcda9ec1fe6a46e62a66735c7699 | [
"MIT"
] | 2 | 2017-06-20T09:25:09.000Z | 2017-08-03T07:38:17.000Z | from __future__ import unicode_literals
from django.contrib import admin
from .api import FilerGuiAdmin
from ..models import FilerGuiFile
# Expose FilerGuiFile in the Django admin with its custom ModelAdmin.
admin.site.register(FilerGuiFile, FilerGuiAdmin)
| 17.454545 | 48 | 0.833333 | from __future__ import unicode_literals
from django.contrib import admin
from .api import FilerGuiAdmin
from ..models import FilerGuiFile
admin.site.register(FilerGuiFile, FilerGuiAdmin)
| true | true |
1c38592f9643bb88ae18a29c8b362c5be017dfa0 | 430 | py | Python | jms_oidc_rp/admin.py | BaiJiangJie/jumpserver-django-oidc-rp | b2d6a63dd82214263b8971412a9043268c31ba7f | [
"MIT"
] | 20 | 2018-04-16T13:17:35.000Z | 2021-06-05T00:08:33.000Z | jms_oidc_rp/admin.py | BaiJiangJie/jumpserver-django-oidc-rp | b2d6a63dd82214263b8971412a9043268c31ba7f | [
"MIT"
] | 9 | 2018-07-20T18:19:13.000Z | 2021-12-22T08:57:18.000Z | oidc_rp/admin.py | bcgov/django-oidc-rp | 50e6fa143e61b04849b4c66beef078be0d7669de | [
"MIT"
] | 21 | 2018-07-10T16:05:44.000Z | 2022-01-24T05:57:09.000Z | """
OpenID Connect relying party (RP) model admin definitions
=========================================================
This module defines admin classes used to populate the Django administration dashboard.
"""
from django.contrib import admin
from .models import OIDCUser
@admin.register(OIDCUser)
class UserAdmin(admin.ModelAdmin):
    """ The OIDC user model admin. """
    # Show the OIDC subject identifier alongside the linked Django user.
    list_display = ('sub', 'user', )
| 22.631579 | 91 | 0.616279 |
from django.contrib import admin
from .models import OIDCUser
@admin.register(OIDCUser)
class UserAdmin(admin.ModelAdmin):
    """The OIDC user model admin: lists the OIDC subject and the Django user."""
    list_display = ('sub', 'user', )
| true | true |
1c38594b131a1b460726372bc8e9479ebc4c1a0f | 44,677 | py | Python | classic_heuristics/gapvrp.py | mschmidt87/VeRyPy | 3086288cf203bbe6dc488de72fb6f8ea2a1382e6 | [
"MIT"
] | 1 | 2020-07-14T15:47:07.000Z | 2020-07-14T15:47:07.000Z | classic_heuristics/gapvrp.py | mrwright313/VeRyPy | eaac6e210d861441071565575750a2f0e25dfb72 | [
"MIT"
] | null | null | null | classic_heuristics/gapvrp.py | mrwright313/VeRyPy | eaac6e210d861441071565575750a2f0e25dfb72 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
""" This file is a part of the VeRyPy classical vehicle routing problem
heuristic library and provides an implementation of the Fisher&Jaikumar (1981)
heuristic, which generates an approximate solution for a VRP via solving it as
an generalized assignment problem (GAP).
The script is callable and can be used as a standalone solver for TSPLIB
formatted CVRPs. It has extensive dependencies: MIP solver Gurobi, built-in TSP
solver, and numpy and scipy for reading and preparing the problem instance."""
###############################################################################
# Written in Python 2.7, but try to maintain Python 3+ compatibility
from __future__ import print_function
from __future__ import division
from signal import signal, SIGINT, default_int_handler
from collections import namedtuple
from math import pi, ceil
from logging import log, DEBUG, WARNING
import numpy as np
from gurobipy import Model, GRB, LinExpr, GurobiError
#from tsp_solvers.tsp_solver_ropt import solve_tsp_ropt as solve_tsp
#from tsp_solvers.tsp_solver_lkh import solve_tsp_lkh as solve_tsp
from tsp_solvers.tsp_solver_gurobi import solve_tsp_gurobi as solve_tsp
from sweep import get_sweep_from_cartesian_coordinates, bisect_angle
from cvrp_io import calculate_D
from util import is_better_sol, totald
from config import MAX_MIP_SOLVER_RUNTIME, MIP_SOLVER_THREADS
from config import CAPACITY_EPSILON as C_EPS
from config import COST_EPSILON as S_EPS
__author__ = "Jussi Rasku"
__copyright__ = "Copyright 2018, Jussi Rasku"
__credits__ = ["Jussi Rasku"]
__license__ = "MIT"
__maintainer__ = "Jussi Rasku"
__email__ = "jussi.rasku@jyu.fi"
__status__ = "Development"
# These hard coded parameters define how the relaxation is adjusted if the
# GAP solution is not L feasible.
L_MPLR_DEFAULT = 1.0
L_ADAPTIVE_MPLR_INIT = 0.85
L_ADAPTIVE_MPLR_INC = 0.85
L_ADAPTIVE_MPLR_MAX_TRIES = 3
INCREASE_K_ON_FAILURE_UPTO = 1.1 # = 10% increase to K (or min of 1)
def _decision_variables_to_assignments(m, Y_ik, N, K):
""" Convert the decision variables in m for keys Y_ik to assignments of
customers i==2..N (0 is the depot) to the routes k=1..K .
TODO: there is probably a neat numpy trick to get node and k indices
out of the decision variable array. For now just use nested loops,
but a cleverer way would problably be faster.
However, "premature optimization is the root of all evil", so profile
first, and modify only after verifying it to be a real bottleneck."""
assignments = []
Y_ik_values = m.getAttr('x', Y_ik)
for k in range(K):
route_nodes = []
for i in range(1, N):
if Y_ik_values[i,k]:
route_nodes.append(i)
assignments.append(route_nodes)
return assignments
def _solve_gap(N, D_s, d, C, K, L=None, L_ctr_multipiler=1.0):
    """A helper function that solves the VRP as a Generalized Assignment
    Problem (GAP) to assign customers to vehicles with an objective function
    approximating the delivery cost as described in (Fisher & Jaikumar 1981).

    D_s is the distance matrix complemented with distances to K seed points.
    That is:
          [D_0]
    D_s = [S  ], where D_0 is the first row of the full distance matrix and
                 S is the distances from seed points to node points
    d is the list of customer demands with d[0]=0 being the depot node
    C is the capacity of the K identical trucks

    also, additional (and optional) constraints can be given:

    L is the maximum tour cost/duration/length
    L_ctr_multipiler allows iteratively adjusting the max route cost
     approximation constraint in order to avoid producing assignments that are
     ruled infeasible by the feasibility checker.

    Returns a list of K lists of customer indices (one per vehicle), or None
    if no feasible assignment was found. Raises GurobiError on solver timeout
    and KeyboardInterrupt if the solver was interrupted.
    --
    Fisher, M. L. and Jaikumar, R. (1981), A generalized assignment
    heuristic for vehicle routing. Networks, 11: 109-124.
    """
    ## build the cost approximation matrix "insertion_cost"
    # it is ~ an insertion cost matrix, where each coefficient is the cost
    # of inserting customer i to the route consisting visit to seed k.
    #
    # we assume that distances are symmetric, but if asymmetric
    # distances are to be used, take min
    # d_{ik} = min(c_{0i}+c_{i{i_k}}+c_{i{i_k}},
    #              c_{0{i_k}}+c_[{i_k}i}+c_{i0})
    #        -(c_{0{i_k}}+c_{{i_k}0})
    m = Model("GAPCVRP")
    # the order of the keys is important when we interpret the results
    Y_ik_keys = [(i,k) for k in range(K) for i in range(1,N)]
    # delivery cost approximation coefficients for the objective function
    insertion_cost = {(i,k): D_s[0,i]+D_s[k,i]-D_s[k,0] \
                      for i,k in Y_ik_keys}
    # variables and the objective
    Y_ik = m.addVars(Y_ik_keys, obj=insertion_cost, vtype=GRB.BINARY, name='y')
    ## constraints
    # c1, the capacity constraint and optional tour cost constraint cl
    approx_route_cost_constraints = []
    if C: c1_coeffs = d[1:]
    for k in range(K):
        ck_vars = [Y_ik[i,k] for i in range(1,N)]
        if C:
            c1_lhs = LinExpr(c1_coeffs,ck_vars)
            #c1_lhs = Y_ik.prod(c1_coeffs, '*', k)
            m.addConstr(c1_lhs <= C, "c1_k%d"%k)
        # ct = optional tour cost constraints
        # it is a bit hidden, but the additional side constraint can be found
        # from Fisher & Jaikumar (1981) p121, 2. paragraph.
        # However, for whatever reason, this does not seem to produce the
        # same results as reported in their paper as the constraint easily
        # starts to make the problem infeasible and the exact mechanism to
        # recover that is not specified in the paper.
        if L:
            ct_coeffs = [insertion_cost[(i,k)]*L_ctr_multipiler for i in range(1,N)]
            ct_lhs = LinExpr(ct_coeffs,ck_vars)
            #ct_lhs = Y_ik.prod(ct_coeffs, '*', k)
            constr_l = m.addConstr(ct_lhs <= L, "cl_k%d"%k)
            approx_route_cost_constraints.append(constr_l)
    # c2, the assignment constraints
    for i in range(1,N):
        # c2_1..N every node assigned only to 1 route
        m.addConstr(Y_ik.sum(i, '*') == 1, "c1_i%d"%i)
    ## update the model and solve
    m._vars = Y_ik
    m.modelSense = GRB.MINIMIZE
    m.update()
    #m.write("gapvrp_model.lp")
    # disable output
    m.setParam('OutputFlag', 0)
    m.setParam('Threads', MIP_SOLVER_THREADS)
    # REMOVEME
    m.setParam('MIPFocus', 3)
    m.setParam('TimeLimit', MAX_MIP_SOLVER_RUNTIME)
    m.optimize()
    # restore SIGINT callback handler which is changed by gurobipy
    signal(SIGINT, default_int_handler)
    if __debug__:
        log(DEBUG-1,"Gurobi runtime = %.2f"%m.Runtime)
    if m.Status == GRB.OPTIMAL:
        return _decision_variables_to_assignments(m, Y_ik, N, K)
    elif m.Status == GRB.INFEASIBLE and L:
        # relax the model and allow violating minimal number of the approximate
        # route length constraints
        pens = [1.0]*len(approx_route_cost_constraints)
        m.feasRelax(1, True, None, None, None, approx_route_cost_constraints, pens)
        # TODO: not sure if feasRelax can change Status, test it someday
        if m.Status == GRB.INTERRUPTED:
            raise KeyboardInterrupt() # pass it on
        m.optimize()
        # restore SIGINT callback handler which is changed by gurobipy
        signal(SIGINT, default_int_handler)
        status = m.Status
        if __debug__:
            log(DEBUG-1, "Relaxed problem Gurobi runtime = %.2f"%m.Runtime)
        if status == GRB.OPTIMAL:
            return _decision_variables_to_assignments(m, Y_ik, N, K)
        elif status == GRB.TIME_LIMIT:
            raise GurobiError(10023, "Gurobi timeout reached when attempting to solve relaxed SCPCVRP")
        elif m.Status == GRB.INTERRUPTED:
            raise KeyboardInterrupt() # pass it on
        return None
    elif m.Status == GRB.TIME_LIMIT:
        raise GurobiError(10023, "Gurobi timeout reached when attempting to solve GAP")
    elif m.Status == GRB.INTERRUPTED:
        raise KeyboardInterrupt() # pass it on
    return None
_Cone = namedtuple('_Cone', ['phi1', 'phi2', 'demand', 'nodes'])
def _sweep_seed_points(points, D, d, C, K, trial=0):
    """A seed point generation function that implements the rule used in
    Fisher and Jaikumar (1981) to select the seed customers for the delivery
    cost approximation calculation in their VRP heuristic. It is assumed that
    all customers are located on a plane with euclidean distances D between
    them and that the truck capacity C is the same for all vehicles.

    The sweep around the depot is split into K groups of consecutive cones
    with (approximately) equal total demand, fractionally splitting a
    customer cone when a group fills up. If C is not given, the groups are
    balanced by an approximate route cost instead. One seed point is then
    placed near the center of demand mass of each group.

    * points, D, d, C, K are the problem data (see gap_init).
    * trial rotates the sweep start position to get alternative groupings.

    Returns a list of K [x,y] seed point coordinates.
    """
    ## Assume planar case and convert to a sweep
    sweep = get_sweep_from_cartesian_coordinates(points)
    ## Append each of the K customer cones into K groups of consecutive cones
    if C:
        alpha = sum(d)/float(K*C)
        group_target = alpha*C # = sum(d)/K
        EPS = C_EPS
    else: #only L set?
        # note: i-1 == -1 on the first iteration wraps the sweep around
        total_sweep_len = sum( D[int(sweep[2][i-1]),int(sweep[2][i])]
                        for i in range(len(sweep[2])) )
        group_target = total_sweep_len/K
        EPS = S_EPS
    if __debug__:
        log(DEBUG-2,"Cone group demand/cost target = %.2f"%group_target )
    #for start_cone in range(len(cones)):
    start_cone_idx = trial
    grouped_cones = []
    group_start_ray = None
    group_end_ray = None
    group_cum = 0.0
    group_nodes = []
    prev_node_i = None
    prev_node_rho = None
    prev_node_phi = sweep[0][start_cone_idx-1]
    if start_cone_idx==0:
        prev_node_phi-=2*pi
    prev_ray = None
    # iterate over all (phi,rho,node_idx) starting from start_cone_idx
    #  and doing it twice so that every cone is visited regardless of the
    #  chosen start position
    for circle_view in (sweep.T[start_cone_idx:], sweep.T[:start_cone_idx+1]):
        for node_phi,node_rho,i in circle_view:
            i = int(i) # is numpy float
            if (node_phi<prev_node_phi):
                node_phi+=2*pi
            ray = bisect_angle(prev_node_phi,node_phi)
            if prev_ray is None:
                group_start_ray = ray
                if __debug__:
                    log(DEBUG-2,"First node %d cone sets group_start_ray=%.2f"%(i,group_start_ray))
            else:
                # calculate if the entire cone (~customer) can be added to the group
                #  or if only a fraction is needed to fill the group.
                if C:
                    cone_fraction = 1.0
                    if d[prev_node_i]!=0:
                        cone_fraction = min(1.0, (group_target-group_cum)/d[prev_node_i])
                    cone_wt = cone_fraction*d[prev_node_i]
                else:
                    cone_fraction = min(1.0, (group_target-group_cum)/(D[prev_node_i,i]))
                    cone_wt = cone_fraction*D[prev_node_i,i]
                group_cum+=cone_wt
                group_nodes.append( (prev_node_rho,prev_node_i,
                                     d[prev_node_i] if C else D[prev_node_i,i]) )
                if __debug__:
                    if C:
                        log(DEBUG-3,"Node %d, added %.2f %% of demand (%.2f)" %\
                            (prev_node_i, cone_fraction*100, d[prev_node_i]))
                    else:
                        log(DEBUG-3,"Node %d, added %.2f %% of cost (%.2f)" %\
                            (prev_node_i, cone_fraction*100, 0.5*D[prev_node_i,i]))
                    log(DEBUG-2,"Group %.2f %% full"%\
                        (group_cum/group_target*100.0))
                if (group_target-group_cum)<EPS:
                    group_end_ray = bisect_angle(prev_ray, ray, cone_fraction)
                    # group is full, store it
                    grouped_cones.append( _Cone(group_start_ray,group_end_ray,
                                                group_cum, group_nodes) )
                    if __debug__:
                        log(DEBUG-2,"Node %d cone sets group_end_ray=%.2f"%\
                            (prev_node_i,group_end_ray))
                        log(DEBUG-2,"Group completed!\n")
                    # next group; the leftover fraction of the split cone is
                    #  carried over to the new group
                    group_start_ray = group_end_ray
                    group_nodes = []
                    group_cum = 0
                    if cone_fraction<1.0:
                        if C:
                            rmdr_wt = (1.0-cone_fraction)*d[prev_node_i]
                        else:
                            rmdr_wt = (1.0-cone_fraction)*D[prev_node_i,i]
                        group_cum += rmdr_wt
                        group_nodes.append((prev_node_rho,prev_node_i,
                                            d[prev_node_i] if C else D[prev_node_i,i]))
                    if __debug__:
                        if len(grouped_cones)<K:
                            log(DEBUG-2,"Node %d cone sets group_start_ray=%.2f"%\
                                (prev_node_i,group_start_ray))
                # the group now spans upto this
                group_end_ray = ray
                if __debug__:
                    if len(grouped_cones)<K:
                        log(DEBUG-2,"Node %d cone grows group to ray=%.2f"%\
                            (prev_node_i,group_end_ray))
            prev_ray = ray
            prev_node_i = i
            prev_node_rho = node_rho
            prev_node_phi = node_phi
    ## get seed from the resulting K merged cones
    seed_points = np.zeros((K,2), dtype=np.float64)
    depot_x = points[0][0]
    depot_y = points[0][1]
    for k, grouped_cone in enumerate(grouped_cones):
        if __debug__:
            log(DEBUG-3," ===========================================")
            log(DEBUG-3," #%d %s"%(k, str(grouped_cone)))
            log(DEBUG-3," ===========================================\n")
        # Find an arc that splits the k-cone in a way that the linear demand
        #  under the arc is "around" 0.75 (the exact definition is in the
        #  Fisher & Jaikumar (1981) paper). Begin by sorting by distance from
        #  the depot and grow arc as long as weight sum is under the limit.
        seed_rho = 0
        grow_arc_wt = 0
        weight_target = 0.75*group_target # 0.75{\lambda}b
        for cr,ci,cwt in sorted(grouped_cone.nodes):
            if grow_arc_wt+cwt>weight_target:
                # take a fraction of the weight just outside the arc
                seed_rho+=((weight_target-grow_arc_wt)/cwt)*(cr-seed_rho)
                break
            else:
                grow_arc_wt+=cwt
                seed_rho=cr
        # Calculate the actual seed point position (polar -> cartesian)
        seed_phi = bisect_angle(grouped_cone.phi1,grouped_cone.phi2)
        seed_points[k,0] = depot_x+seed_rho*np.cos(seed_phi)
        seed_points[k,1] = depot_y+seed_rho*np.sin(seed_phi)
    return seed_points.tolist()
def _kmeans_seed_points(points, D, d, C, K, trial=0):
    """Place the K seed points at the centers of the customer point clusters
    computed with k-Means. The depot (points[0]) is excluded from the
    clustering and trial is used as the random seed of the algorithm."""
    from sklearn.cluster import KMeans
    clustering = KMeans(n_clusters=K, random_state=trial)
    clustering.fit(points[1:])
    return clustering.cluster_centers_.tolist()
def _end_of_thoroughfares_seed_points(points, D, d, C, K, trial=0):
    """A seed point generation function that automates the human assisted
    idea presented in Fisher and Jaikumar (1981) involving placing the seed
    points to the end of thoroughfares leaving from the depot. A DBSCAN
    clustering is made and the seeds are selected among non-core points. Non-
    core points should be, due to the operating principle of DBSCAN, at the
    ends of long cluster "arms". By selecting the non-core points farthest from
    the depot and previously selected seeds, we should get a set of seed points
    closely following the Fisher and Jaikumar (1981) idea: "customers
    often lie along radial corridors corresponding to major thoroughfares, and
    the most distant ... along these corridors are natural seed customers".
    Fisher and Jaikumar (1981) presented the idea interactive computer systems
    in mind, whereas this implementation is automatic.

    TODO: in practice, the results are underwhelming. Instead, one should do
    1d clustering for phis and then choose the farthest point of each
    "Sweep cluster".

    parameters:
    - points, D, d, C, K as before
    - trial can be used to get different clusterings from the DBSCAN algorithm.
       The initial eps is determined by getting the median distance of the
       nn=2., 3., 2., 3., 3., 4., 3,... nearest neighbour of all nodes
       depending if the trial is 0,1,2,3,4,5,6,7.. following the formula
       nn=2+trial%2+int(trial/4))
       The seed points are selected among the non-core points S_nc by
       maximizing the summed (square rooted) distances to the already selected
       seeds and the depot. If it happens that |S_nc|<K, all non-core points
       are included and the rest of the seed points are taken first from the
       core points and finally from the outliers.

    WARNING: This seed heuristic may return None seeds as the existence of non-
    core points cannot be guranteed.
    """
    from sklearn.cluster import DBSCAN
    from util import produce_nn_list
    # use a heuristic to get eps that finds all 2. closest nodes and
    #  uses the median distance of those as the eps
    N = len(d)
    nnD = produce_nn_list(D)
    nn = 2+trial%2+int(trial/4)
    nn2l = [nnS[nn][0] for nnS in nnD]
    nn2l.sort()
    min_size = 3#+int(trial/2)
    eps = nn2l[int(N/2)]
    ## Get non-core DBSCAN points
    if __debug__:
        # fixed: logging.log takes one message string (extra positional
        #  arguments are %-format args), the old call passed loose strings
        #  which broke the record formatting
        log(DEBUG-2,"Doing DBSCAN with eps = %s min_size = %d"%(str(eps),min_size))
    db = DBSCAN(eps=eps, min_samples=min_size).fit(points)
    outliers_mask = db.labels_ == -1
    clustered_mask = db.labels_ != -1
    core_samples_mask = np.zeros(N, dtype=bool)
    core_samples_mask[db.core_sample_indices_] = True
    # we are interested of the nodes at the fringes of the clusters
    candidate_mask = clustered_mask^core_samples_mask
    candidate_idxs = np.where(candidate_mask)[0].tolist()
    candidates_type = "cluster non-core"
    if __debug__:
        # fixed: wrap zip in list() so the pairs are actually printed on
        #  Python 3, and use the correct sklearn attribute name
        #  core_sample_indices_ (core_sample_idxs_ raised AttributeError)
        log(DEBUG-3,"DBSCAN labels = %s"%str(list(zip(range(N),db.labels_))))
        log(DEBUG-3,"DBSCAN core = %s"%str(db.core_sample_indices_))
        log(DEBUG-2,"Select %d seed nodes from non-core nodes %s."%
            (min(len(candidate_idxs),K), str(candidate_idxs)))
    seeds = []
    selected_seeds_mask = np.zeros(N, dtype=bool)
    # make depot like a seed -> maximize distance from it
    selected_seeds_mask[0] = True
    if len(candidate_idxs)<=K:
        # if all candidates are needed, add them without checking the distances
        for seed_idx in candidate_idxs:
            seeds.append( points[seed_idx] )
            if __debug__:
                log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                    (seed_idx,points[seed_idx][0],points[seed_idx][1],candidates_type))
            selected_seeds_mask[seed_idx] = True
        candidate_idxs = []
    used_core_points = False
    while len(seeds)<K:
        if not candidate_idxs:
            if not used_core_points:
                # ran out of non-core candidates. Use clustered as candidates
                candidate_mask = core_samples_mask
                candidate_idxs = np.where(core_samples_mask)[0].tolist()
                candidates_type = "cluster core"
                used_core_points = True
                if __debug__:
                    log(DEBUG-3,"Ran out of non-core nodes, select %d seed nodes from core nodes %s"%
                        (min(len(candidate_idxs), K-len(seeds)), str(candidate_idxs)))
            else:
                # finally fall back to the DBSCAN outliers
                candidate_mask = outliers_mask
                candidate_idxs = np.where(outliers_mask)[0].tolist()
                candidates_type = "outliers"
                if __debug__:
                    log(DEBUG-3, "Ran out of core and non-core nodes, select %d seed nodes from outlier nodes %s"%
                        (K-len(seeds), str(candidate_idxs)))
        # maximize the distance to other seeds and depot
        if not seeds:
            D_to_seeds = D[selected_seeds_mask,candidate_mask]
        else:
            D_to_seeds = np.sum( np.sqrt((D[selected_seeds_mask,:])[:,candidate_mask]), axis=0)
        seed_idx = candidate_idxs[np.argmax( D_to_seeds )]
        selected_seeds_mask[seed_idx] = True
        seeds.append( points[seed_idx] )
        if __debug__:
            log(DEBUG-2, "Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                (seed_idx,points[seed_idx][0],points[seed_idx][1], candidates_type))
        # prevent selecting it again
        candidate_mask[seed_idx] = False
        candidate_idxs.remove(seed_idx)
    return seeds
def _large_demand_seed_points(points, D, d, C, K, trial=0):
    """A seed point generation function that automates the human assisted
    idea presented in Fisher and Jaikumar (1981): "Customers for which
    d_i > 1/2 C can also be made seed customers".

    Nodes whose demand exceeds half of the vehicle capacity C are always
    candidates; when trial>0, the `trial` next-largest-demand nodes are
    added to the candidate pool. If there are at most K candidates they all
    become seeds; any remaining seeds are picked from the other nodes by
    maximizing the (demand weighted) summed square-rooted distance to the
    depot and the already selected seeds.

    * points, D, d, C, K are the problem data (see gap_init).
    * trial selects alternative seed configurations (see above).

    Returns a list of K seed coordinates (elements of `points`).
    """
    # make sure we are dealing with np arrays here
    np_d = np.array(d)
    N = len(d)
    # we look mainly at the large d nodes where only 1 fits on a route
    can_fit_only_1_mask = np_d > (0.5*C)
    candidate_d_mask = can_fit_only_1_mask.copy()
    candidate_d_idxs = np.where(can_fit_only_1_mask)[0].tolist()
    if trial:
        # in addition, add as many OTHER largest d ones as trial is
        not_over_half_idxs = np.where( ~candidate_d_mask )[0].tolist()
        sorted_d = [(d[i], i) for i in not_over_half_idxs]
        sorted_d.sort(reverse=True)
        # fixed for Python 3: zip() returns a non-subscriptable iterator,
        #  so list(zip(*sorted_d)[1]) raised a TypeError; unpack the node
        #  indices with a comprehension instead
        sorted_d_idxs = [node_i for _node_d, node_i in sorted_d]
        additional_large_d_idxs = sorted_d_idxs[max(0, trial-N):min(N,trial)]
        candidate_d_idxs+=additional_large_d_idxs
        candidate_d_mask[additional_large_d_idxs] = True
    large_d_mask = np.copy(candidate_d_mask)
    if __debug__:
        log(DEBUG-2, "Select %d seed nodes from large demand nodes %s"%
            (min(len(candidate_d_idxs),K), str(candidate_d_idxs)))
    seeds = []
    selected_seeds_mask = np.zeros(len(d), dtype=bool)
    # make depot like a seed -> maximize distance from it
    selected_seeds_mask[0] = True
    if len(candidate_d_idxs)<=K:
        # if all candidates are needed, add them without checking the distances
        for seed_idx in candidate_d_idxs:
            seeds.append( points[seed_idx] )
            selected_seeds_mask[seed_idx] = True
            if __debug__:
                log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that %s to be a seed"%\
                    (seed_idx,points[seed_idx][0],points[seed_idx][1],
                     "fills over the half of the capacity" if can_fit_only_1_mask[seed_idx]
                     else "is within "+str(trial)+" largest demands"))
        candidate_d_idxs = []
    select_from_non_large = False
    while len(seeds)<K:
        if not candidate_d_idxs:
            # ran out of large demand candidates, fall back to the rest of
            #  the customer nodes (the depot is never a candidate)
            candidate_d_mask = ~large_d_mask
            candidate_d_mask[0]=False
            candidate_d_idxs = np.where(candidate_d_mask)[0].tolist()
            select_from_non_large = True
            if __debug__:
                log(DEBUG-2,"Ran out of nodes with large demand, select %d seed nodes from rest of the nodes %s using inter seed distances weighted by the node demand"%
                    (min(len(candidate_d_idxs), K-len(seeds)), str(candidate_d_idxs)))
        # maximize the distance to other seeds and depot
        if not seeds:
            D_to_seeds = D[selected_seeds_mask,candidate_d_mask]
        else:
            D_to_seeds = np.sum( np.sqrt((D[selected_seeds_mask,:])[:,candidate_d_mask]), axis=0)
        if select_from_non_large:
            # multiply by demand
            D_to_seeds = np.multiply(D_to_seeds,np_d[candidate_d_mask]/C)
        seed_idx = candidate_d_idxs[np.argmax( D_to_seeds )]
        selected_seeds_mask[seed_idx] = True
        seeds.append( points[seed_idx] )
        if __debug__:
            if can_fit_only_1_mask[seed_idx]:
                candidates_type = "fills over the half of the capacity"
            elif large_d_mask[seed_idx]:
                candidates_type = "is within "+str(trial)+" largest demands"
            else:
                candidates_type = "when weighted by demand has largest distance from other seeds"
            log(DEBUG-2,"Selecting a node n%d (%.2f, %.2f) that %s to be a seed"%\
                (seed_idx,points[seed_idx][0],points[seed_idx][1], candidates_type))
        # prevent selecting it again
        candidate_d_mask[seed_idx] = False
        candidate_d_idxs.remove(seed_idx)
    return seeds
def gap_init(points, D, d, C, L=None, st=None, K=None, minimize_K=True,
             find_optimal_seeds=True,
             seed_method="cones",
             seed_edge_weight_type='EUC_2D',
             use_adaptive_L_constraint_weights=True,
             increase_K_on_failure=False):
             #REMOVEME, disable!
             #increase_K_on_failure=True):
    """ An implementation of a three phase cluster-first-route-second CVRP
    construction / route initialization algorithm. The first two phases involve
    the clustering. First, a seed point is generated for each route, which is
    then used in approximating customer node service costs in solving
    generalized assignment problem (GAP) relaxation of the VRP. The resulting
    assignments are then routed using a TSP solver. The algorithm has been
    first proposed in (Fisher and Jaikumar 1981).

    The algorithm assumes that the problem is planar and this implementation
    allows seeding in the following ways:
    * seed_method="cones", the initialization method of Fisher and Jaikumar
        (1981) which can be described as Sweep with fractional distribution of
        customer demand and placing the seed points approximately to the center
        of demand mass of created sectors.
    * seed_method="kmeans", initialize seed points to k-means cluster centers.
    * seed_method="large_demands", according to Fisher and Jaikumar (1981)
        "Customers for which d_i > 1/2 C can also be made seed customers".
        However applying this rule relies on human operator who then decides
        the intuitively best seed points. This implementation selects the
        seed points satisfying the criteria d_i>mC, where m is the fractional
        capacity multiplier, that are farthest from the depot and each other.
        The m is made iteratively smaller if there are no at least K seed point
        candidates.
    * seed_method="ends_of_thoroughfares", this option was described in
        (Fisher and Jaikumar 1981) as "Most distant customers at the end of
        thoroughfares leaving from the depot are natural seed customers". They
        relied on human operator. To automate this selection we make a
        DBSCAN clustering with eps = median 2. nearest neighbor of all nodes
        and min_samples of 3.

    The other parameters are:
    * points is a list of x,y coordinates of the depot [0] and the customers.
    * D is a numpy ndarray (or equivalent) of the full 2D distance matrix,
       including the service times (st/2.0 for leaving and entering nodes).
    * d is a list of demands. d[0] should be 0.0 as it is the depot.
    * C is the capacity constraint limit for the identical vehicles.
    * L is the optional constraint for the maximum route length/duration/cost.
    * st is the service time. However, also the D should be modified with
       service times to allow straight computation of the TSP solutions (see
       above)
    * K is the optional parameter specifying the required number of vehicles.
       The algorithm is only allowed to find solutions with this many vehicles.
    * minimize_K, if set to True (default), makes the minimum number of routes
       the primary and the solution cost the secondary objective. If set False
       the algorithm optimizes for minimum solution / route cost by increasing
       K as long as it seems beneficial. WARNING: the algorithm suits this use
       case (cost at the objective) poorly and setting this option to False may
       significantly increase the required CPU time.
    * find_optimal_seeds if set to True, tries all possible Sweep start
       positions / k-Means with N different seeds. If False, only one sweep
       from the node closest to the depot is done / k-Means clustering is done
       only once with one random seed value.
    * seed_edge_weight_type specifies how to round off the distances from the
       customer nodes (points) to the seed points. Supports all TSPLIB edge
       weight types.
    Note1: The GAP is optimized using Gurobi solver. If L constraint is set,
     the side constraints may make the GAP instance tricky to solve and it
     is advisable to set a sensible timeout with config.MAX_MIP_SOLVER_RUNTIME
    * use_adaptive_L_constraint_weights if set True, and the L constraint is
       set, the algorithm adaptively adjusts the route cost approximation of the
       relevant side constraints so that a solution which is not L infeasible or
       GAP infeasible is found. The exact handling of L constraint is vague in
       (Fisher and Jaikumar 1981) and this was our best guess on how the
       feasible region of the problem can be found. Note that if GAP solver is
       terminated due to a timeout, the adaptive multiplier is increased and
       GAP solution is attempted again. However, if increase_K_on_failure is set,
       (see below) it takes priority over this.
    * increase_K_on_failure (default False) is another countermeasure against
       long running GAP solving attempts for problem instances without L
       constraint (if there is L constraint, and use_adaptive_L_constraint_-
       weights is enabled, this is ignored) or instances where K estimation
       does not work and it takes excessively long time to check all initial
       seed configurations before increasing K. If Gurobi timeout is encountered
       or the solution is GAP infeasible, and this option is enabled, the K is
       temporarily increased, new seed points generated for current sweep start
       location and another GAP solution attempt is made. K is allowed to be
       increased temporarily up to 10% of the minimum K allowed (or 1, whichever
       is larger).
    Note2: logger controls the debug level but running the script with
     Python -O option disables all debug output.

    Returns the best found solution as a giant tour (a single list of node
    indices with the depot 0 separating the routes), or None if no feasible
    solution was found.

    Fisher, M. L. and Jaikumar, R. (1981), A generalized assignment heuristic
    for vehicle routing. Networks, 11: 109-124. doi:10.1002/net.3230110205
    """ #TODO: other alternatives
    # customers with maximum demand or most distant customer from origin
    if seed_method=="cones":
        seed_f = _sweep_seed_points
    if seed_method=="kmeans":
        seed_f = _kmeans_seed_points
    if seed_method=="large_demands":
        if not C: raise ValueError("""The "large_demands" seed initialization method requires demands and C constraint to be known.""")
        seed_f = _large_demand_seed_points
    if seed_method=="ends_of_thoroughfares":
        seed_f = _end_of_thoroughfares_seed_points
    int_dists = issubclass(D.dtype.type, np.integer)
    if seed_edge_weight_type=="EXPLICIT":
        seed_edge_weight_type = "EUC_2D" if int_dists else "EXACT_2D"
    if not points:
        raise ValueError("The algorithm requires 2D coordinates for the points")
    N = len(D)
    if K:
        startK = K
        maxK = K
    else:
        # start from the smallest K possible
        if C:
            startK = int(ceil(sum(d)/C))
        elif L:
            # find a lower bound by checking how many visits from the TSP
            #  tour need to add to have any chance of making this L feasible.
            _,tsp_f = solve_tsp(D, range(1,N))
            shortest_depot_edges = list(D[0,1:])
            shortest_depot_edges.sort()
            startK = int(ceil(tsp_f/L))
            while True:
                if tsp_f+sum(shortest_depot_edges[:startK*2])<=startK*L:
                    break
                startK+=1
        else:
            raise ValueError("If C and L have not been set, K is required")
        maxK = N-1
    # We only need first row of the distance matrix to calculate insertion
    #  costs for GAP objective function
    D_0 = np.copy( D[0,:] )
    best_sol = None
    best_f = None
    best_K = None
    seed_trial = 0
    # incK is the temporary K increase used by increase_K_on_failure
    incK = 0
    maxKinc = max(startK+1, int(startK*INCREASE_K_ON_FAILURE_UPTO))
    L_ctr_multipiler = L_MPLR_DEFAULT
    if L and use_adaptive_L_constraint_weights:
        # Adaptive L constraint multiplier
        L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
        L_ctr_multipiler_tries = 0
    # Outer loop grows the number of vehicles K, the inner loop tries the
    #  different seed configurations for that K.
    try:
        for currentK in range(startK, maxK+1):
            found_improving_solution_for_this_K = False
            seed_trial=0
            while True:
                if __debug__:
                    log(DEBUG, "ITERATION:K=%d, trial=%d, L_ctr_mul=%.6f\n"%
                              (currentK+incK,seed_trial,L_ctr_multipiler))
                    log(DEBUG-1, "Getting %d seed points...\n"%(currentK+incK))
                # Get seed points
                seed_points = seed_f(points, D, d, C, currentK+incK, seed_trial)
                if __debug__:
                    log(DEBUG-1, "...got seed points %s\n"%str(seed_points))
                # Extend the distance matrix with seed distances
                S = calculate_D(seed_points, points, seed_edge_weight_type)
                if st:
                    # include the "leaving half" of the service_time in the
                    # distances (the other half is already added to the D
                    # prior to gapvrp_init)
                    halftst = int(st/2) if int_dists else st/2.0
                    S[:,1:] += halftst
                D_s = np.vstack( (D_0, S) )
                GAP_infeasible = False
                L_infeasible = False
                solution = [0]
                sol_f = 0
                solved = False
                sol_K = 0
                take_next_seed = False
                try:
                    # Distribute the nodes to vehicles using the approximate
                    #  service costs in D_s and by solving it as GAP
                    #
                    #TODO: the model has the same dimensions for all iterations
                    # with the same K and only the weights differ. Consider
                    # replacing the coefficient matrix e.g. via C interface
                    #https://stackoverflow.com/questions/33461329
                    assignments = _solve_gap(N, D_s, d, C, currentK+incK, L,
                                             L_ctr_multipiler)
                    if not assignments:
                        if __debug__:
                            log(DEBUG, "INFEASIBILITY: GAP infeasible solution")
                            corrective_action = "try with another seed = %d"%seed_trial
                        GAP_infeasible = True
                    else:
                        if __debug__:
                            log(DEBUG-1, "Assignments = %s"%str(assignments))
                        # Due to floating point inaccuracies in L constrained
                        #  cases the feasrelax may be used, which, in turn, can
                        #  in some corner cases return solutions that are not
                        #  really feasible. Make sure it is not the case
                        if L: served = set([0])
                        for route_nodes in assignments:
                            if not route_nodes:
                                continue
                            route,route_l = solve_tsp(D, [0]+route_nodes)
                            # Check for feasibility violations due to feasrelax
                            if L:
                                served |= set(route_nodes)
                            if C and d and totald(route,d)-C_EPS>C:
                                if __debug__:
                                    log(DEBUG, "INFEASIBILITY: feasRelax "+
                                        "caused GAP infeasible solution "+
                                        " (capacity constraint violation)")
                                GAP_infeasible = True
                                break # the route loop
                            solution += route[1:]
                            sol_f += route_l
                            sol_K += 1
                            if __debug__:
                                log(DEBUG-2, "DEBUG: Got TSP solution %s (%.2f)"%
                                    (str(route),route_l))
                            if L and route_l-S_EPS>L:
                                if __debug__:
                                    log(DEBUG, "INFEASIBILITY: L infeasible solution")
                                L_infeasible = True
                                break # break route for loop
                        # Check for feasibility violations due to feasrelax.
                        # Have all customers been served?
                        if not GAP_infeasible and not L_infeasible and\
                           L and len(served)<len(D):
                            if __debug__:
                                log(DEBUG, "INFEASIBILITY: feasRelax caused GAP "+
                                    "infeasible solution (all customers "+
                                    "are not served)")
                            GAP_infeasible = True
                    if not GAP_infeasible and not L_infeasible:
                        if __debug__:
                            log(DEBUG, "Yielded feasible solution = %s (%.2f)"%(str(solution), sol_f))
                        solved = True
                except GurobiError as grbe:
                    # GAP solver timed out; decide how to recover
                    if __debug__: log(WARNING, str(grbe))
                    if L and use_adaptive_L_constraint_weights and \
                       L_ctr_multipiler_tries<L_ADAPTIVE_MPLR_MAX_TRIES:
                        L_ctr_multipiler+=L_ADAPTIVE_MPLR_INC
                        L_ctr_multipiler_tries+=1
                        if __debug__: corrective_action = "Gurobi timeout, try with another L_ctr_multipiler = %.2f"%L_ctr_multipiler
                    elif increase_K_on_failure and currentK+incK+1<=maxKinc:
                        if L and use_adaptive_L_constraint_weights and\
                           L_ctr_multipiler_tries>=L_ADAPTIVE_MPLR_MAX_TRIES:
                            # try with all multiplier values for larger K
                            L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
                            L_ctr_multipiler_tries = 0
                        incK+=1
                        if __debug__: corrective_action = "Gurobi timeout, temporarely increase K by %d"%incK
                    elif find_optimal_seeds:
                        take_next_seed = True
                    else:
                        grbe.message+=", consider increasing the MAX_MIP_SOLVER_RUNTIME in config.py"
                        raise grbe
                else:
                    if L and use_adaptive_L_constraint_weights:
                        ## Adaptive GAP/L constraint multiplier reset
                        # reset multiplier in case it the L feasibility was not violated
                        #  or it has reached the max_value.
                        if solved or L_ctr_multipiler_tries>=L_ADAPTIVE_MPLR_MAX_TRIES:
                            L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
                            L_ctr_multipiler_tries = 0
                            take_next_seed = True
                            if not solved and increase_K_on_failure and currentK+incK+1<=maxKinc:
                                incK+=1
                                take_next_seed = False
                                if __debug__: corrective_action = "temporarely increase K by %d"%incK
                            else:
                                if __debug__: corrective_action = "try with another seed = %d"%seed_trial
                        ## Adaptive GAP/L constraint multiplier update
                        else:
                            L_ctr_multipiler+=L_ADAPTIVE_MPLR_INC
                            L_ctr_multipiler_tries+=1
                            if __debug__: corrective_action = "try with another L_ctr_multipiler = %.2f"%L_ctr_multipiler
                    else:
                        if not solved and increase_K_on_failure and currentK+incK+1<=maxKinc:
                            incK+=1
                            if __debug__: corrective_action = "temporarely increase K by %d"%incK
                        else:
                            take_next_seed = True
                # Store the best so far
                if solved:
                    if is_better_sol(best_f, best_K, sol_f, sol_K, minimize_K):
                        best_sol = solution
                        best_f = sol_f
                        best_K = sol_K
                        found_improving_solution_for_this_K = True
                else:
                    # No feasible solution was found for this trial (max route cost
                    # or capacity constraint was violated).
                    if __debug__:
                        if GAP_infeasible or L_infeasible:
                            log(DEBUG, "Constraint is violated, "+corrective_action)
                        else:
                            log(DEBUG, "Continuing search, "+corrective_action)
                if take_next_seed:
                    incK = 0
                    seed_trial+=1
                    if not find_optimal_seeds:
                        break # seed loop, possibly try next K
                if seed_trial==N:
                    incK = 0
                    break # seed loop, possibly try next K
            if minimize_K:
                # do not try different K if we found a solution
                if best_sol:
                    break # K loop
            else: # not minimize_K
                # We already have an feasible solution for K<K_current, and could
                # not find a better solution than that on K_current. Therefore, it
                # is improbable we will find one even if we increase K and we
                # should stop here.
                if best_sol and not found_improving_solution_for_this_K:
                    break
    except KeyboardInterrupt: #or SIGINT
        # pass on the current best_sol
        raise KeyboardInterrupt(best_sol)
    return best_sol
# ---------------------------------------------------------------------
# Wrapper for the command line user interface (CLI)
def get_gap_algorithm(seed_method="cones"):
    """ Build the (name, description, constructor) triple the shared CLI
    expects. The returned constructor closes over seed_method and forwards
    the CLI arguments to gap_init."""
    def call_init(points, D, d, C, L, st, wtt, single, minimize_K):
        # 'single' means: do not search over all seed configurations
        return gap_init(points, D, d, C, L=L, st=st,
                        K=None, minimize_K=minimize_K,
                        seed_edge_weight_type=wtt,
                        find_optimal_seeds=(not single),
                        seed_method=seed_method)
    return ("FJ81-GAP",
            "Fisher & Jaikumar (1981) generalized assignment problem heuristic",
            call_init)
# When run as a script, hand this algorithm over to the shared command line
# interface of the solver collection.
if __name__=="__main__":
    from shared_cli import cli
    cli(*get_gap_algorithm())
| 47.327331 | 168 | 0.583298 |
emand (%.2f)" %\
(prev_node_i, cone_fraction*100, d[prev_node_i]))
else:
log(DEBUG-3,"Node %d, added %.2f %% of cost (%.2f)" %\
(prev_node_i, cone_fraction*100, 0.5*D[prev_node_i,i]))
log(DEBUG-2,"Group %.2f %% full"%\
(group_cum/group_target*100.0))
if (group_target-group_cum)<EPS:
group_end_ray = bisect_angle(prev_ray, ray, cone_fraction)
grouped_cones.append( _Cone(group_start_ray,group_end_ray,
group_cum, group_nodes) )
if __debug__:
log(DEBUG-2,"Node %d cone sets group_end_ray=%.2f"%\
(prev_node_i,group_end_ray))
log(DEBUG-2,"Group completed!\n")
group_start_ray = group_end_ray
group_nodes = []
group_cum = 0
if cone_fraction<1.0:
if C:
rmdr_wt = (1.0-cone_fraction)*d[prev_node_i]
else:
rmdr_wt = (1.0-cone_fraction)*D[prev_node_i,i]
group_cum += rmdr_wt
group_nodes.append((prev_node_rho,prev_node_i,
d[prev_node_i] if C else D[prev_node_i,i]))
if __debug__:
if len(grouped_cones)<K:
log(DEBUG-2,"Node %d cone sets group_start_ray=%.2f"%\
(prev_node_i,group_start_ray))
group_end_ray = ray
if __debug__:
if len(grouped_cones)<K:
log(DEBUG-2,"Node %d cone grows group to ray=%.2f"%\
(prev_node_i,group_end_ray))
prev_ray = ray
prev_node_i = i
prev_node_rho = node_rho
prev_node_phi = node_phi
float64)
depot_x = points[0][0]
depot_y = points[0][1]
for k, grouped_cone in enumerate(grouped_cones):
if __debug__:
log(DEBUG-3," ===========================================")
log(DEBUG-3," #%d %s"%(k, str(grouped_cone)))
log(DEBUG-3," ===========================================\n")
seed_rho = 0
grow_arc_wt = 0
weight_target = 0.75*group_target
for cr,ci,cwt in sorted(grouped_cone.nodes):
if grow_arc_wt+cwt>weight_target:
seed_rho+=((weight_target-grow_arc_wt)/cwt)*(cr-seed_rho)
break
else:
grow_arc_wt+=cwt
seed_rho=cr
seed_phi = bisect_angle(grouped_cone.phi1,grouped_cone.phi2)
seed_points[k,0] = depot_x+seed_rho*np.cos(seed_phi)
seed_points[k,1] = depot_y+seed_rho*np.sin(seed_phi)
return seed_points.tolist()
def _kmeans_seed_points(points, D, d, C, K, trial=0):
    """Return K seed coordinates: the K-means cluster centers of the customer
    points (the depot at index 0 is excluded). ``trial`` seeds the clustering
    RNG so that successive trials produce different seed configurations."""
    from sklearn.cluster import KMeans
    customer_coords = points[1:]  # drop the depot
    clustering = KMeans(n_clusters=K, random_state=trial)
    clustering.fit(customer_coords)
    return clustering.cluster_centers_.tolist()
def _end_of_thoroughfares_seed_points(points, D, d, C, K, trial=0):
    """Pick K seed points at the "ends of thoroughfares".

    Clusters all points with DBSCAN and prefers cluster border (non-core)
    points as seeds, falling back first to core points and then to outliers,
    always choosing the candidate farthest from the already selected seeds.
    ``trial`` varies the nearest-neighbour rank used for the eps estimate.
    """
    from sklearn.cluster import DBSCAN
    from util import produce_nn_list
    N = len(d)
    nnD = produce_nn_list(D)
    # Use a progressively larger nearest-neighbour rank on later trials.
    nn = 2+trial%2+int(trial/4)
    nn2l = [nnS[nn][0] for nnS in nnD]
    nn2l.sort()
    min_size = 3
    # Median nn-distance as the DBSCAN neighbourhood radius.
    eps = nn2l[int(N/2)]
    if __debug__:
        # Fixed: this log line was corrupted in the source (it was a bare
        # ``EBUG-2,"Doing DBSCAN ..."`` fragment, a syntax error).
        log(DEBUG-2, "Doing DBSCAN with eps = %s, min_size = %s"%(str(eps), str(min_size)))
    db = DBSCAN(eps=eps, min_samples=min_size).fit(points)
    outliers_mask = db.labels_ == -1
    clustered_mask = db.labels_ != -1
    core_samples_mask = np.zeros(N, dtype=bool)
    core_samples_mask[db.core_sample_indices_] = True
    # Border points = clustered but not core.
    candidate_mask = clustered_mask^core_samples_mask
    candidate_idxs = np.where(candidate_mask)[0].tolist()
    candidates_type = "cluster non-core"
    if __debug__:
        # Fixed: wrap zip in list() for a readable repr on Python 3 and use
        # the real sklearn attribute name ``core_sample_indices_``.
        log(DEBUG-3,"DBSCAN labels = %s"%str(list(zip(range(N),db.labels_))))
        log(DEBUG-3,"DBSCAN core = %s"%str(db.core_sample_indices_))
        log(DEBUG-2,"Select %d seed nodes from non-core nodes %s."%
            (min(len(candidate_idxs),K), str(candidate_idxs)))
    seeds = []
    selected_seeds_mask = np.zeros(N, dtype=bool)
    selected_seeds_mask[0] = True
    if len(candidate_idxs)<=K:
        # Too few border points: take all of them as seeds outright.
        for seed_idx in candidate_idxs:
            seeds.append( points[seed_idx] )
            if __debug__:
                log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                    (seed_idx,points[seed_idx][0],points[seed_idx][1],candidates_type))
            selected_seeds_mask[seed_idx] = True
        candidate_idxs = []
    used_core_points = False
    while len(seeds)<K:
        if not candidate_idxs:
            if not used_core_points:
                # Exhausted border points -> fall back to cluster cores.
                candidate_mask = core_samples_mask
                candidate_idxs = np.where(core_samples_mask)[0].tolist()
                candidates_type = "cluster core"
                used_core_points = True
                if __debug__:
                    log(DEBUG-3,"Ran out of non-core nodes, select %d seed nodes from core nodes %s"%
                        (min(len(candidate_idxs), K-len(seeds)), str(candidate_idxs)))
            else:
                # Last resort: DBSCAN outliers.
                candidate_mask = outliers_mask
                candidate_idxs = np.where(outliers_mask)[0].tolist()
                candidates_type = "outliers"
                if __debug__:
                    log(DEBUG-3, "Ran out of core and non-core nodes, select %d seed nodes from outlier nodes %s"%
                        (K-len(seeds), str(candidate_idxs)))
        # Score candidates by distance to the already chosen seeds (sqrt
        # damps the influence of far-away seeds when several are chosen).
        if not seeds:
            D_to_seeds = D[selected_seeds_mask,candidate_mask]
        else:
            D_to_seeds = np.sum( np.sqrt((D[selected_seeds_mask,:])[:,candidate_mask]), axis=0)
        seed_idx = candidate_idxs[np.argmax( D_to_seeds )]
        selected_seeds_mask[seed_idx] = True
        seeds.append( points[seed_idx] )
        if __debug__:
            log(DEBUG-2, "Selecting n%d (%.2f, %.2f) that is a %s point to be a seed"%
                (seed_idx,points[seed_idx][0],points[seed_idx][1], candidates_type))
        candidate_mask[seed_idx] = False
        candidate_idxs.remove(seed_idx)
        # NOTE(review): if border, core and outlier candidates are all
        # exhausted before K seeds are found, argmax on an empty array would
        # raise; presumably K is always small enough in practice — verify.
    return seeds
def _large_demand_seed_points(points, D, d, C, K, trial=0):
    """Pick K seed points at large-demand customers.

    Customers whose demand exceeds half of the capacity C are preferred (they
    can never share a route); on later trials (``trial``>0) the next largest
    demands are added to the candidate pool. Remaining seeds are chosen to
    maximize the demand-weighted distance from the seeds picked so far.
    """
    np_d = np.array(d)
    N = len(d)
    # Customers that fill over half a vehicle cannot be paired on one route.
    can_fit_only_1_mask = np_d > (0.5*C)
    candidate_d_mask = can_fit_only_1_mask.copy()
    candidate_d_idxs = np.where(can_fit_only_1_mask)[0].tolist()
    if trial:
        not_over_half_idxs = np.where( ~candidate_d_mask )[0].tolist()
        sorted_d = [(d[i], i) for i in not_over_half_idxs]
        sorted_d.sort(reverse=True)
        # Fixed: was ``list(zip(*sorted_d)[1])`` which raises TypeError on
        # Python 3 (zip objects are not subscriptable).
        sorted_d_idxs = [i for _demand, i in sorted_d]
        # Widen the candidate pool with the ``trial`` largest remaining demands.
        # NOTE(review): index 0 (the depot) can appear here when trial is
        # large; presumably d[0] == 0 keeps it sorted last — verify.
        additional_large_d_idxs = sorted_d_idxs[max(0, trial-N):min(N,trial)]
        candidate_d_idxs+=additional_large_d_idxs
        candidate_d_mask[additional_large_d_idxs] = True
    large_d_mask = np.copy(candidate_d_mask)
    if __debug__:
        log(DEBUG-2, "Select %d seed nodes from large demand nodes %s"%
            (min(len(candidate_d_idxs),K), str(candidate_d_idxs)))
    seeds = []
    selected_seeds_mask = np.zeros(len(d), dtype=bool)
    selected_seeds_mask[0] = True
    if len(candidate_d_idxs)<=K:
        # Few enough large-demand candidates: take all of them as seeds.
        for seed_idx in candidate_d_idxs:
            seeds.append( points[seed_idx] )
            selected_seeds_mask[seed_idx] = True
            if __debug__:
                log(DEBUG-2,"Selecting n%d (%.2f, %.2f) that %s to be a seed"%\
                    (seed_idx,points[seed_idx][0],points[seed_idx][1],
                     "fills over the half of the capacity" if can_fit_only_1_mask[seed_idx]
                     else "is within "+str(trial)+" largest demands"))
        candidate_d_idxs = []
    select_from_non_large = False
    while len(seeds)<K:
        if not candidate_d_idxs:
            # Exhausted large-demand candidates: consider the other customers,
            # weighting their distance-to-seeds score by relative demand.
            candidate_d_mask = ~large_d_mask
            candidate_d_mask[0]=False
            candidate_d_idxs = np.where(candidate_d_mask)[0].tolist()
            select_from_non_large = True
            if __debug__:
                log(DEBUG-2,"Ran out of nodes with large demand, select %d seed nodes from rest of the nodes %s using inter seed distances weighted by the node demand"%
                    (min(len(candidate_d_idxs), K-len(seeds)), str(candidate_d_idxs)))
        if not seeds:
            D_to_seeds = D[selected_seeds_mask,candidate_d_mask]
        else:
            D_to_seeds = np.sum( np.sqrt((D[selected_seeds_mask,:])[:,candidate_d_mask]), axis=0)
        if select_from_non_large:
            # Prefer far-away nodes with relatively large demand.
            D_to_seeds = np.multiply(D_to_seeds,np_d[candidate_d_mask]/C)
        seed_idx = candidate_d_idxs[np.argmax( D_to_seeds )]
        selected_seeds_mask[seed_idx] = True
        seeds.append( points[seed_idx] )
        if __debug__:
            if can_fit_only_1_mask[seed_idx]:
                candidates_type = "fills over the half of the capacity"
            elif large_d_mask[seed_idx]:
                candidates_type = "is within "+str(trial)+" largest demands"
            else:
                candidates_type = "when weighted by demand has largest distance from other seeds"
            log(DEBUG-2,"Selecting a node n%d (%.2f, %.2f) that %s to be a seed"%\
                (seed_idx,points[seed_idx][0],points[seed_idx][1], candidates_type))
        candidate_d_mask[seed_idx] = False
        candidate_d_idxs.remove(seed_idx)
    return seeds
def gap_init(points, D, d, C, L=None, st=None, K=None, minimize_K=True,
             find_optimal_seeds=True,
             seed_method="cones",
             seed_edge_weight_type='EUC_2D',
             use_adaptive_L_constraint_weights=True,
             increase_K_on_failure=False):
    """Fisher & Jaikumar (1981) generalized assignment problem (GAP)
    heuristic for vehicle routing (see get_gap_algorithm below).

    For each trial the heuristic (1) generates K seed points with the chosen
    ``seed_method``, (2) solves a GAP via ``_solve_gap`` to assign every
    customer to one seed, and (3) solves a TSP over each resulting group.
    The best feasible solution over all tried seed configurations (and, when
    K is not fixed, over increasing K) is kept.

    points -- 2D coordinates of depot (index 0) and customers; required.
    D -- full distance matrix (numpy array, N = len(D)).
    d -- demands per node; C -- vehicle capacity constraint (optional).
    L -- maximum route cost constraint (optional); st -- service time, half
    of which is added to the seed-to-customer distances.
    K -- fixed number of routes; if None it is derived from C or L and then
    increased until a solution is found.
    minimize_K -- prefer fewer routes (via is_better_sol) and stop raising K
    once a solution exists.
    find_optimal_seeds -- try all N seed trials instead of the first feasible.
    use_adaptive_L_constraint_weights -- adapt the L-relaxation multiplier
    between retries; increase_K_on_failure -- temporarily allow more routes
    when the GAP is infeasible.

    Returns the best solution found (concatenation of the per-route TSP
    tours, starting from depot node 0), or None when none was found.

    NOTE(review): "multipiler" is a pre-existing misspelling of "multiplier"
    kept here because the module-level constants (L_MPLR_DEFAULT etc.) and
    sibling code use the same spelling.
    """
    # Select the seed point generation procedure.
    if seed_method=="cones":
        seed_f = _sweep_seed_points
    if seed_method=="kmeans":
        seed_f = _kmeans_seed_points
    if seed_method=="large_demands":
        if not C: raise ValueError("""The "large_demands" seed initialization method requires demands and C constraint to be known.""")
        seed_f = _large_demand_seed_points
    if seed_method=="ends_of_thoroughfares":
        seed_f = _end_of_thoroughfares_seed_points
    int_dists = issubclass(D.dtype.type, np.integer)
    # EXPLICIT distances carry no coordinate system; fall back to Euclidean.
    if seed_edge_weight_type=="EXPLICIT":
        seed_edge_weight_type = "EUC_2D" if int_dists else "EXACT_2D"
    if not points:
        raise ValueError("The algorithm requires 2D coordinates for the points")
    N = len(D)
    # Determine the range of fleet sizes (K values) to try.
    if K:
        startK = K
        maxK = K
    else:
        if C:
            # Lower bound from total demand vs. capacity.
            startK = int(ceil(sum(d)/C))
        elif L:
            # Lower bound from a TSP relaxation plus the cheapest depot edges.
            _,tsp_f = solve_tsp(D, range(1,N))
            shortest_depot_edges = list(D[0,1:])
            shortest_depot_edges.sort()
            startK = int(ceil(tsp_f/L))
            while True:
                if tsp_f+sum(shortest_depot_edges[:startK*2])<=startK*L:
                    break
                startK+=1
        else:
            raise ValueError("If C and L have not been set, K is required")
        maxK = N-1
    # Depot row of D becomes the first row of the seed distance matrix D_s.
    D_0 = np.copy( D[0,:] )
    best_sol = None
    best_f = None
    best_K = None
    seed_trial = 0
    incK = 0
    maxKinc = max(startK+1, int(startK*INCREASE_K_ON_FAILURE_UPTO))
    L_ctr_multipiler = L_MPLR_DEFAULT
    if L and use_adaptive_L_constraint_weights:
        # Adaptive L constraint multiplier starts from its initial value.
        L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
        L_ctr_multipiler_tries = 0
    try:
        # Outer loop over fleet sizes; inner loop over seed trials.
        for currentK in range(startK, maxK+1):
            found_improving_solution_for_this_K = False
            seed_trial=0
            while True:
                if __debug__:
                    log(DEBUG, "ITERATION:K=%d, trial=%d, L_ctr_mul=%.6f\n"%
                        (currentK+incK,seed_trial,L_ctr_multipiler))
                    log(DEBUG-1, "Getting %d seed points...\n"%(currentK+incK))
                # 1) Generate seed points and their distances to all nodes.
                seed_points = seed_f(points, D, d, C, currentK+incK, seed_trial)
                if __debug__:
                    log(DEBUG-1, "...got seed points %s\n"%str(seed_points))
                S = calculate_D(seed_points, points, seed_edge_weight_type)
                if st:
                    # Half of the service time is charged on each seed edge.
                    halftst = int(st/2) if int_dists else st/2.0
                    S[:,1:] += halftst
                D_s = np.vstack( (D_0, S) )
                GAP_infeasible = False
                L_infeasible = False
                solution = [0]
                sol_f = 0
                solved = False
                sol_K = 0
                take_next_seed = False
                try:
                    # 2) Assign customers to seeds by solving the GAP.
                    assignments = _solve_gap(N, D_s, d, C, currentK+incK, L,
                                             L_ctr_multipiler)
                    if not assignments:
                        if __debug__:
                            log(DEBUG, "INFEASIBILITY: GAP infeasible solution")
                            corrective_action = "try with another seed = %d"%seed_trial
                        GAP_infeasible = True
                    else:
                        if __debug__:
                            log(DEBUG-1, "Assignments = %s"%str(assignments))
                        # 3) Route each assigned group with a TSP solver and
                        # verify C and L feasibility of the resulting routes.
                        if L: served = set([0])
                        for route_nodes in assignments:
                            if not route_nodes:
                                continue
                            route,route_l = solve_tsp(D, [0]+route_nodes)
                            if L:
                                served |= set(route_nodes)
                            if C and d and totald(route,d)-C_EPS>C:
                                if __debug__:
                                    log(DEBUG, "INFEASIBILITY: feasRelax "+
                                        "caused GAP infeasible solution "+
                                        " (capacity constraint violation)")
                                GAP_infeasible = True
                                break
                            solution += route[1:]
                            sol_f += route_l
                            sol_K += 1
                            if __debug__:
                                log(DEBUG-2, "DEBUG: Got TSP solution %s (%.2f)"%
                                    (str(route),route_l))
                            if L and route_l-S_EPS>L:
                                if __debug__:
                                    log(DEBUG, "INFEASIBILITY: L infeasible solution")
                                L_infeasible = True
                                break
                        # The relaxed GAP may leave customers unassigned.
                        if not GAP_infeasible and not L_infeasible and\
                           L and len(served)<len(D):
                            if __debug__:
                                log(DEBUG, "INFEASIBILITY: feasRelax caused GAP "+
                                    "infeasible solution (all customers "+
                                    "are not served)")
                            GAP_infeasible = True
                        if not GAP_infeasible and not L_infeasible:
                            if __debug__:
                                log(DEBUG, "Yielded feasible solution = %s (%.2f)"%(str(solution), sol_f))
                            solved = True
                except GurobiError as grbe:
                    # MIP solver failure/timeout: adapt the multiplier, bump K,
                    # try the next seed, or give up depending on the settings.
                    if __debug__: log(WARNING, str(grbe))
                    if L and use_adaptive_L_constraint_weights and \
                       L_ctr_multipiler_tries<L_ADAPTIVE_MPLR_MAX_TRIES:
                        L_ctr_multipiler+=L_ADAPTIVE_MPLR_INC
                        L_ctr_multipiler_tries+=1
                        if __debug__: corrective_action = "Gurobi timeout, try with another L_ctr_multipiler = %.2f"%L_ctr_multipiler
                    elif increase_K_on_failure and currentK+incK+1<=maxKinc:
                        if L and use_adaptive_L_constraint_weights and\
                           L_ctr_multipiler_tries>=L_ADAPTIVE_MPLR_MAX_TRIES:
                            L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
                            L_ctr_multipiler_tries = 0
                        incK+=1
                        if __debug__: corrective_action = "Gurobi timeout, temporarely increase K by %d"%incK
                    elif find_optimal_seeds:
                        take_next_seed = True
                    else:
                        grbe.message+=", consider increasing the MAX_MIP_SOLVER_RUNTIME in config.py"
                        raise grbe
                else:
                    # Solver ran: decide the corrective action for the next
                    # iteration (adapt multiplier, raise K, or next seed).
                    if L and use_adaptive_L_constraint_weights:
                        if solved or L_ctr_multipiler_tries>=L_ADAPTIVE_MPLR_MAX_TRIES:
                            L_ctr_multipiler = L_ADAPTIVE_MPLR_INIT
                            L_ctr_multipiler_tries = 0
                            take_next_seed = True
                            if not solved and increase_K_on_failure and currentK+incK+1<=maxKinc:
                                incK+=1
                                take_next_seed = False
                                if __debug__: corrective_action = "temporarely increase K by %d"%incK
                            else:
                                if __debug__: corrective_action = "try with another seed = %d"%seed_trial
                        else:
                            L_ctr_multipiler+=L_ADAPTIVE_MPLR_INC
                            L_ctr_multipiler_tries+=1
                            if __debug__: corrective_action = "try with another L_ctr_multipiler = %.2f"%L_ctr_multipiler
                    else:
                        if not solved and increase_K_on_failure and currentK+incK+1<=maxKinc:
                            incK+=1
                            if __debug__: corrective_action = "temporarely increase K by %d"%incK
                        else:
                            take_next_seed = True
                # Store the best solution found so far.
                if solved:
                    if is_better_sol(best_f, best_K, sol_f, sol_K, minimize_K):
                        best_sol = solution
                        best_f = sol_f
                        best_K = sol_K
                        found_improving_solution_for_this_K = True
                else:
                    if __debug__:
                        if GAP_infeasible or L_infeasible:
                            log(DEBUG, "Constraint is violated, "+corrective_action)
                        else:
                            log(DEBUG, "Continuing search, "+corrective_action)
                if take_next_seed:
                    incK = 0
                    seed_trial+=1
                    if not find_optimal_seeds:
                        break
                if seed_trial==N:
                    # All N seed trials for this K were exhausted.
                    incK = 0
                    break
            if minimize_K:
                # Do not try larger K once any solution exists.
                if best_sol:
                    break
            else:
                # A solution exists for a smaller K and this K brought no
                # improvement; a larger K is unlikely to help either.
                if best_sol and not found_improving_solution_for_this_K:
                    break
    except KeyboardInterrupt:
        # Pass the current best solution on to the caller (or SIGINT handler).
        raise KeyboardInterrupt(best_sol)
    return best_sol
def get_gap_algorithm(seed_method="cones"):
    """Return (name, description, init function) for the shared CLI wrapper."""
    algo_name = "FJ81-GAP"
    algo_desc = "Fisher & Jaikumar (1981) generalized assignment problem heuristic"
    def call_init(points, D, d, C, L, st, wtt, single, minimize_K):
        # Adapt the generic CLI signature to gap_init's keyword interface;
        # a "single" run disables the exhaustive optimal-seed search.
        return gap_init(points, D, d, C, L=L, st=st,
                        K=None, minimize_K=minimize_K,
                        seed_edge_weight_type=wtt,
                        find_optimal_seeds=(not single),
                        seed_method=seed_method)
    return (algo_name, algo_desc, call_init)
if __name__=="__main__":
    # Expose this algorithm through the shared command line interface.
    from shared_cli import cli
    cli(*get_gap_algorithm())
| true | true |
1c385a3f212c43b7b143336820cc95534fcd766b | 1,761 | py | Python | src/api/authentication/social_login_signup.py | ThaDeveloper/grind | fa90b65d12e6d9b3d658b132874801ecda08c57f | [
"MIT"
] | 1 | 2019-11-06T22:26:26.000Z | 2019-11-06T22:26:26.000Z | src/api/authentication/social_login_signup.py | ThaDeveloper/grind | fa90b65d12e6d9b3d658b132874801ecda08c57f | [
"MIT"
] | 5 | 2021-03-19T02:49:44.000Z | 2021-06-10T19:13:00.000Z | src/api/authentication/social_login_signup.py | ThaDeveloper/grind | fa90b65d12e6d9b3d658b132874801ecda08c57f | [
"MIT"
] | null | null | null | import uuid
from rest_framework.response import Response
from rest_framework import status
from api.models import User
from api.helpers.get_response import custom_reponse
class SocialAuth:
    """
    Logs in / registers a user based on a social provider profile.
    """

    def social_login_signup(self, user_info, **kwargs):
        """
        If a user with the given email exists, authenticate them with their
        `social account` info; otherwise register a new account using the
        `social accounts` info.

        :param user_info: mapping with the social profile fields; ``email``
            identifies the account, ``first_name``/``last_name`` populate a
            newly created one.
        :returns: API access token and/or user data.
        """
        try:
            user = User.objects.get(email=user_info.get('email'))
            token = user.token
            # Fix: the keyword used to be misspelled ``toke``, so the token
            # was published under the wrong key in the success payload.
            return custom_reponse('success', 200, token=token,
                                  message='Logged in successfully.')
        except User.DoesNotExist:
            # Social sign-ups get a random password so the account cannot be
            # entered through the regular password login flow.
            password = User.objects.make_random_password()
            user = User(
                # Short numeric suffix to reduce username collisions.
                username=str(user_info.get('first_name')) +
                str(uuid.uuid1().int)[: 3],
                email=user_info.get('email'),
                first_name=user_info.get('first_name'),
                last_name=user_info.get('last_name'),
                active=True)
            user.set_password(password)
            user.save()
            user_details = {
                'id': user.id,
                'username': user.username,
                'email': user.email,
                'first_name': user.first_name,
                'last_name': user.last_name
            }
            token = user.token
            return Response({
                'status': 'success',
                'token': token,
                'data': user_details,
                'message': 'Account created successfully '
            }, status=status.HTTP_201_CREATED)
| 33.865385 | 74 | 0.545713 | import uuid
from rest_framework.response import Response
from rest_framework import status
from api.models import User
from api.helpers.get_response import custom_reponse
class SocialAuth:
    """Logs in / registers a user based on a social provider profile."""
    def social_login_signup(self, user_info, **kwargs):
        """Authenticate an existing user by the e-mail in ``user_info`` or
        create a new account from the social profile data; returns the API
        token and, for new accounts, the created user's details."""
        try:
            user = User.objects.get(email=user_info.get('email'))
            token = user.token
            # NOTE(review): ``toke`` looks like a misspelling of ``token`` —
            # verify against custom_reponse's expected keyword arguments.
            return custom_reponse('success', 200, toke=token,
                                  message='Logged in successfully.')
        except User.DoesNotExist:
            # Social sign-ups get a random password so the account cannot be
            # entered through the regular password login flow.
            password = User.objects.make_random_password()
            user = User(
                # Short numeric suffix to reduce username collisions.
                username=str(user_info.get('first_name')) +
                str(uuid.uuid1().int)[: 3],
                email=user_info.get('email'),
                first_name=user_info.get('first_name'),
                last_name=user_info.get('last_name'),
                active=True)
            user.set_password(password)
            user.save()
            user_details = {
                'id': user.id,
                'username': user.username,
                'email': user.email,
                'first_name': user.first_name,
                'last_name': user.last_name
            }
            token = user.token
            return Response({
                'status': 'success',
                'token': token,
                'data': user_details,
                'message': 'Account created successfully '
            }, status=status.HTTP_201_CREATED)
| true | true |
1c385a8aa041e66422af67145f40f8881410313a | 4,766 | py | Python | gcloud/core/apis/drf/viewsets/utils.py | qqqqqie/bk-sops | f2e734c2cdac76f89d2e4f0fd7de36168e452141 | [
"Apache-2.0"
] | 881 | 2019-03-25T02:45:42.000Z | 2022-03-30T09:10:49.000Z | gcloud/core/apis/drf/viewsets/utils.py | m0re-work/bk-sops | d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d | [
"Apache-2.0"
] | 3,303 | 2019-03-25T04:18:03.000Z | 2022-03-31T11:52:03.000Z | gcloud/core/apis/drf/viewsets/utils.py | m0re-work/bk-sops | d03ba8a4ee0781c6daaf0dd38a7369dc82669f7d | [
"Apache-2.0"
] | 395 | 2019-03-25T02:53:36.000Z | 2022-03-31T08:37:28.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
from iam import Subject, Action, MultiActionRequest
from iam.shortcuts import allow_or_raise_auth_failed
from rest_framework import status
from rest_framework.exceptions import ErrorDetail
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from gcloud import err_code
from gcloud.iam_auth import IAMMeta, get_iam_client
# Shared IAM client instance and a dedicated logger for IAM decisions.
iam = get_iam_client()
iam_logger = logging.getLogger("iam")
class ApiMixin(GenericViewSet):
    """Viewset mixin that wraps DRF responses in the project's standard
    ``{"result", "data", "code", "message"}`` envelope."""

    # Status codes whose payload is left untouched (204 has no body).
    EXEMPT_STATUS_CODES = {status.HTTP_204_NO_CONTENT}

    def finalize_response(self, request, response, *args, **kwargs):
        # Apply uniform post-processing to rest_framework Response objects.
        if isinstance(response, Response):
            if response.exception is True:
                fallback = ErrorDetail("Error from API exception", err_code.UNKNOWN_ERROR.code)
                error = response.data.get("detail", fallback)
                response.data = {
                    "result": False,
                    "data": response.data,
                    "code": error.code,
                    "message": str(error),
                }
            elif response.status_code not in self.EXEMPT_STATUS_CODES:
                response.data = {
                    "result": True,
                    "data": response.data,
                    "code": err_code.SUCCESS.code,
                    "message": "",
                }
        return super(ApiMixin, self).finalize_response(request, response, *args, **kwargs)
class IAMMixin:
    """Helpers for IAM permission checks and for resolving which IAM actions
    the requesting user is allowed to perform on model instances."""

    @staticmethod
    def iam_auth_check(request, action, resources):
        # Raise an auth-failed error unless the requesting user may perform
        # ``action`` on ``resources``.
        allow_or_raise_auth_failed(
            iam=iam,
            system=IAMMeta.SYSTEM_ID,
            subject=Subject("user", request.user.username),
            action=Action(action),
            resources=resources,
        )

    def iam_get_instances_auth_actions(self, request, instances):
        """Batch-resolve allowed IAM actions per instance.

        Returns ``{instance.id: [allowed action, ...]}`` or ``None`` when the
        viewset defines no ``iam_resource_helper``.
        """
        helper = getattr(self, "iam_resource_helper", None)
        if not helper:
            return None

        # Collect the IAM resource description of every instance.
        resources_list = [helper.get_resources(inst) for inst in instances]
        if not resources_list:
            return None

        # Build one multi-action request covering all helper actions.
        iam_request = MultiActionRequest(
            helper.system,
            helper.get_subject_for_alter_list(request, instances),
            [Action(act) for act in helper.actions],
            [],
            helper.get_environment_for_alter_list(request, instances),
        )
        allowed_map = helper.iam.batch_resource_multi_actions_allowed(iam_request, resources_list)
        iam_logger.debug(
            "[drf iam_get_instances_auth_actions] batch_resource_multi_actions_allowed request({}) result: {}".format(
                iam_request.to_dict(), allowed_map
            )
        )

        # Keep only the actions that were allowed for each instance.
        return {
            inst.id: [
                act
                for act, allowed in allowed_map.get(str(helper.get_resources_id(inst)), {}).items()
                if allowed
            ]
            for inst in instances
        }

    def iam_get_instance_auth_actions(self, request, instance):
        """Resolve the IAM actions the user may perform on one instance."""
        helper = getattr(self, "iam_resource_helper", None)
        if not helper:
            return None

        resources = helper.get_resources(instance)
        iam_request = MultiActionRequest(
            helper.system,
            helper.get_subject_for_alter_detail(request, instance),
            [Action(act) for act in helper.actions],
            resources,
            helper.get_environment_for_alter_detail(request, instance),
        )
        allowed_map = helper.iam.resource_multi_actions_allowed(iam_request)
        iam_logger.debug(
            "[drf iam_get_instance_auth_actions] resource_multi_actions_allowed request({}) result: {}".format(
                iam_request.to_dict(), allowed_map
            )
        )
        return [act for act, allowed in allowed_map.items() if allowed]
| 38.128 | 118 | 0.672472 |
import logging
from iam import Subject, Action, MultiActionRequest
from iam.shortcuts import allow_or_raise_auth_failed
from rest_framework import status
from rest_framework.exceptions import ErrorDetail
from rest_framework.response import Response
from rest_framework.viewsets import GenericViewSet
from gcloud import err_code
from gcloud.iam_auth import IAMMeta, get_iam_client
# Shared IAM client instance and a dedicated logger for IAM decisions.
iam = get_iam_client()
iam_logger = logging.getLogger("iam")
class ApiMixin(GenericViewSet):
    """Viewset mixin that wraps DRF responses in the project's standard
    ``{"result", "data", "code", "message"}`` envelope."""
    # Status codes whose payload is left untouched (204 has no body).
    EXEMPT_STATUS_CODES = {status.HTTP_204_NO_CONTENT}
    def finalize_response(self, request, response, *args, **kwargs):
        # Only rest_framework Response objects are post-processed here.
        if isinstance(response, Response):
            if response.exception is True:
                # Failure: pull the error detail (with a generic fallback)
                # and surface its code/message in the envelope.
                error = response.data.get(
                    "detail", ErrorDetail("Error from API exception", err_code.UNKNOWN_ERROR.code)
                )
                response.data = {"result": False, "data": response.data, "code": error.code, "message": str(error)}
            elif response.status_code not in self.EXEMPT_STATUS_CODES:
                response.data = {"result": True, "data": response.data, "code": err_code.SUCCESS.code, "message": ""}
        return super(ApiMixin, self).finalize_response(request, response, *args, **kwargs)
class IAMMixin:
    """Helpers for IAM permission checks and for resolving which IAM actions
    the requesting user is allowed to perform on model instances."""
    @staticmethod
    def iam_auth_check(request, action, resources):
        # Raises an auth-failed error unless the user may perform ``action``.
        allow_or_raise_auth_failed(
            iam=iam,
            system=IAMMeta.SYSTEM_ID,
            subject=Subject("user", request.user.username),
            action=Action(action),
            resources=resources,
        )
    def iam_get_instances_auth_actions(self, request, instances):
        """Batch-resolve allowed IAM actions per instance; returns
        ``{instance.id: [allowed action, ...]}`` or None when the viewset
        defines no ``iam_resource_helper``."""
        helper = getattr(self, "iam_resource_helper", None)
        if not helper:
            return None
        # 1. Collect the IAM resource description of every instance.
        resources_list = []
        for instance in instances:
            resources_list.append(helper.get_resources(instance))
        if not resources_list:
            return None
        # 2. Build one multi-action request covering all helper actions.
        # (Note: this deliberately shadows the ``request`` parameter.)
        request = MultiActionRequest(
            helper.system,
            helper.get_subject_for_alter_list(request, instances),
            [Action(action) for action in helper.actions],
            [],
            helper.get_environment_for_alter_list(request, instances),
        )
        resource_actions_allowed = helper.iam.batch_resource_multi_actions_allowed(request, resources_list)
        iam_logger.debug(
            "[drf iam_get_instances_auth_actions] batch_resource_multi_actions_allowed request({}) result: {}".format(
                request.to_dict(), resource_actions_allowed
            )
        )
        # 3. Keep only the actions that were allowed for each instance.
        auth_actions = dict()
        for instance in instances:
            rid = str(helper.get_resources_id(instance))
            auth_actions[instance.id] = [
                action for action, allowed in resource_actions_allowed.get(rid, {}).items() if allowed
            ]
        return auth_actions
    def iam_get_instance_auth_actions(self, request, instance):
        """Resolve the IAM actions the user may perform on one instance."""
        helper = getattr(self, "iam_resource_helper", None)
        if not helper:
            return None
        # 1. Describe the instance as IAM resources.
        resources = helper.get_resources(instance)
        # 2. Ask IAM about every helper action at once.
        request = MultiActionRequest(
            helper.system,
            helper.get_subject_for_alter_detail(request, instance),
            [Action(action) for action in helper.actions],
            resources,
            helper.get_environment_for_alter_detail(request, instance),
        )
        actions_allowed = helper.iam.resource_multi_actions_allowed(request)
        iam_logger.debug(
            "[drf iam_get_instance_auth_actions] resource_multi_actions_allowed request({}) result: {}".format(
                request.to_dict(), actions_allowed
            )
        )
        # 3. Keep only the allowed actions.
        auth_actions = [action for action, allowed in actions_allowed.items() if allowed]
        return auth_actions
| true | true |
1c385d6490b25ea69dde00beb4f6ee638b4d42f2 | 2,032 | py | Python | corus/sources/taiga/nplus1.py | Ilseyar/corus | 61a4776f5e534469bb9df1e451b6a6d5fc0e991b | [
"MIT"
] | 205 | 2019-05-01T07:38:01.000Z | 2022-03-30T04:02:54.000Z | corus/sources/taiga/nplus1.py | Ilseyar/corus | 61a4776f5e534469bb9df1e451b6a6d5fc0e991b | [
"MIT"
] | 78 | 2019-04-29T06:53:53.000Z | 2021-09-20T14:51:25.000Z | corus/sources/taiga/nplus1.py | Ilseyar/corus | 61a4776f5e534469bb9df1e451b6a6d5fc0e991b | [
"MIT"
] | 18 | 2019-06-19T09:56:10.000Z | 2022-01-30T14:55:14.000Z |
from datetime import datetime
from .common import (
Author,
Meta,
load_tar_metas,
load_tar_texts,
patch_month,
merge_metas,
)
# {'author': 'Владимир Королев',
# 'authorreaders': '',
# 'authortexts': '',
# 'date': '21 Янв. 2017',
# 'magazine': '',
# 'segment': 'nplus1',
# 'source': 'https://nplus1.ru/news/2017/01/21/Asphaltene-3d',
# 'tags': '',
# 'textdiff': '5.2',
# 'textid': '20170121Asphaltene-3d',
# 'textname': '«Архипелаги» асфальтенов ощупали в 3D',
# 'textregion': '',
# 'textrubric': 'Наука',
# 'time': '17:34'},
# Russian month abbreviations used in nplus1.ru dates mapped to English %b
# tokens so that datetime.strptime can parse them (see parse_metas).
NPLUS1_MONTHS = {
    'Янв.': 'Jan',
    'Фев.': 'Feb',
    'Март': 'Mar',
    'Апр.': 'Apr',
    'Май': 'May',
    'Июнь': 'Jun',
    'Июль': 'Jul',
    'Авг.': 'Aug',
    'Сен.': 'Sep',
    'Окт.': 'Oct',
    'Нояб.': 'Nov',
    'Дек.': 'Dec',
}
def parse_metas(items):
    """Yield a ``Meta`` for every raw nplus1.ru metadata record."""
    for record in items:
        date, time = record['date'], record['time']
        timestamp = None
        if date and time:
            # Translate the Russian month and glue date+time for strptime.
            raw_timestamp = patch_month(date, NPLUS1_MONTHS) + time
            timestamp = datetime.strptime(raw_timestamp, '%d %b %Y%H:%M')
        yield Meta(
            id=record['textid'],
            timestamp=timestamp,
            author=Author(name=record['author'] or None),
            title=record['textname'],
            rubric=record['textrubric'] or None,
            url=record['source']
        )
def load_taiga_nplus1_metas(path, offset=0, count=1):
    """Load and parse the nplus1 metadata table from a Taiga tar archive."""
    return parse_metas(
        load_tar_metas(path, '*/newmetadata.csv', offset, count)
    )
# home/tsha/NPlus1/texts/20150320drone.txt
# home/tsha/NPlus1/texts/20150320nitrogen.txt
# home/tsha/NPlus1/texts/20150320silica.txt
def load_taiga_nplus1(path, metas=None, offset=1919488, count=7696):
    """Load nplus1 article texts from a Taiga tar archive, merged with
    ``metas`` when provided."""
    texts = load_tar_texts(path, '*/texts/*.txt', offset, count)
    return merge_metas(texts, metas)
# Public API of this module.
__all__ = [
    'load_taiga_nplus1_metas',
    'load_taiga_nplus1'
]
| 22.577778 | 69 | 0.572835 |
from datetime import datetime
from .common import (
Author,
Meta,
load_tar_metas,
load_tar_texts,
patch_month,
merge_metas,
)
# Russian month abbreviations used in nplus1.ru dates mapped to English %b
# tokens so that datetime.strptime can parse them (see parse_metas).
NPLUS1_MONTHS = {
    'Янв.': 'Jan',
    'Фев.': 'Feb',
    'Март': 'Mar',
    'Апр.': 'Apr',
    'Май': 'May',
    'Июнь': 'Jun',
    'Июль': 'Jul',
    'Авг.': 'Aug',
    'Сен.': 'Sep',
    'Окт.': 'Oct',
    'Нояб.': 'Nov',
    'Дек.': 'Dec',
}
def parse_metas(items):
    """Yield a ``Meta`` for every raw nplus1.ru metadata record."""
    for item in items:
        id = item['textid']
        timestamp, date, time = None, item['date'], item['time']
        if date and time:
            # Translate the Russian month and glue date+time for strptime.
            timestamp = patch_month(date, NPLUS1_MONTHS) + time
            timestamp = datetime.strptime(timestamp, '%d %b %Y%H:%M')
        # Empty strings become None for optional fields.
        name = item['author'] or None
        author = Author(name=name)
        title = item['textname']
        rubric = item['textrubric'] or None
        url = item['source']
        yield Meta(
            id=id,
            timestamp=timestamp,
            author=author,
            title=title,
            rubric=rubric,
            url=url
        )
def load_taiga_nplus1_metas(path, offset=0, count=1):
    """Load and parse the nplus1 metadata table from a Taiga tar archive."""
    items = load_tar_metas(path, '*/newmetadata.csv', offset, count)
    return parse_metas(items)
def load_taiga_nplus1(path, metas=None, offset=1919488, count=7696):
    """Load nplus1 article texts from a Taiga tar archive, merged with
    ``metas`` when provided."""
    records = load_tar_texts(path, '*/texts/*.txt', offset, count)
    return merge_metas(records, metas)
# Public API of this module.
__all__ = [
    'load_taiga_nplus1_metas',
    'load_taiga_nplus1'
]
| true | true |
1c385f91a9b3b361993612243a63eebb6335184b | 6,325 | py | Python | tests.py | badalraina31/snappass | f16106acc7cada1b3f77fddd312faa963e0b6e20 | [
"MIT"
] | null | null | null | tests.py | badalraina31/snappass | f16106acc7cada1b3f77fddd312faa963e0b6e20 | [
"MIT"
] | null | null | null | tests.py | badalraina31/snappass | f16106acc7cada1b3f77fddd312faa963e0b6e20 | [
"MIT"
] | null | null | null | import re
import time
import unittest
import uuid
from unittest import TestCase
from unittest import mock
from urllib.parse import unquote
from cryptography.fernet import Fernet
from freezegun import freeze_time
from werkzeug.exceptions import BadRequest
from fakeredis import FakeStrictRedis
# noinspection PyPep8Naming
import snappass.main as snappass
__author__ = 'davedash'
class SnapPassTestCase(TestCase):
    """Unit tests for snappass' password storage and input validation."""

    @mock.patch('redis.client.StrictRedis', FakeStrictRedis)
    def test_get_password(self):
        password = "melatonin overdose 1337!$"
        key = snappass.set_password(password, 30)
        self.assertEqual(password, snappass.get_password(key))
        # Assert that we can't look this up a second time.
        self.assertIsNone(snappass.get_password(key))

    def test_password_is_not_stored_in_plaintext(self):
        password = "trustno1"
        token = snappass.set_password(password, 30)
        # The token is "<redis key><separator><encryption key>".
        redis_key = token.split(snappass.TOKEN_SEPARATOR)[0]
        stored_password_text = snappass.redis_client.get(redis_key).decode('utf-8')
        self.assertNotIn(password, stored_password_text)

    def test_returned_token_format(self):
        password = "trustsome1"
        token = snappass.set_password(password, 30)
        token_fragments = token.split(snappass.TOKEN_SEPARATOR)
        self.assertEqual(2, len(token_fragments))
        redis_key, encryption_key = token_fragments
        # 32 hex chars plus the configured key prefix.
        self.assertEqual(32 + len(snappass.REDIS_PREFIX), len(redis_key))
        try:
            # Fernet rejects malformed keys with ValueError.
            Fernet(encryption_key.encode('utf-8'))
        except ValueError:
            self.fail('the encryption key is not valid')

    def test_encryption_key_is_returned(self):
        password = "trustany1"
        token = snappass.set_password(password, 30)
        token_fragments = token.split(snappass.TOKEN_SEPARATOR)
        redis_key, encryption_key = token_fragments
        stored_password = snappass.redis_client.get(redis_key)
        # The second token fragment must decrypt the stored ciphertext.
        fernet = Fernet(encryption_key.encode('utf-8'))
        decrypted_password = fernet.decrypt(stored_password).decode('utf-8')
        self.assertEqual(password, decrypted_password)

    def test_unencrypted_passwords_still_work(self):
        # Legacy entries stored as plaintext (no encryption key fragment)
        # must still be retrievable.
        unencrypted_password = "trustevery1"
        storage_key = uuid.uuid4().hex
        snappass.redis_client.setex(storage_key, 30, unencrypted_password)
        retrieved_password = snappass.get_password(storage_key)
        self.assertEqual(unencrypted_password, retrieved_password)

    def test_password_is_decoded(self):
        password = "correct horse battery staple"
        key = snappass.set_password(password, 30)
        self.assertFalse(isinstance(snappass.get_password(key), bytes))

    def test_clean_input(self):
        # Test Bad Data
        with snappass.app.test_request_context(
                "/", data={'password': 'foo', 'ttl': 'bar'}, method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)

        # No Password
        with snappass.app.test_request_context(
                "/", method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)

        # No TTL
        with snappass.app.test_request_context(
                "/", data={'password': 'foo'}, method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)

        # Valid input: 'hour' maps to 3600 seconds.
        with snappass.app.test_request_context(
                "/", data={'password': 'foo', 'ttl': 'hour'}, method='POST'):
            self.assertEqual((3600, 'foo'), snappass.clean_input())

    def test_password_before_expiration(self):
        password = 'fidelio'
        key = snappass.set_password(password, 1)
        self.assertEqual(password, snappass.get_password(key))

    def test_password_after_expiration(self):
        password = 'open sesame'
        key = snappass.set_password(password, 1)
        # Wait past the 1 second TTL (real sleep; redis expiry is time-based).
        time.sleep(1.5)
        self.assertIsNone(snappass.get_password(key))
class SnapPassRoutesTestCase(TestCase):
    """HTTP-level tests exercising the Flask routes via the test client."""

    # noinspection PyPep8Naming
    def setUp(self):
        snappass.app.config['TESTING'] = True
        self.app = snappass.app.test_client()

    def test_preview_password(self):
        secret = "I like novelty kitten statues!"
        token = snappass.set_password(secret, 30)
        response = self.app.get('/{0}'.format(token))
        # A GET only renders the confirmation page, never the secret itself.
        self.assertNotIn(secret, response.get_data(as_text=True))

    def test_show_password(self):
        secret = "I like novelty kitten statues!"
        token = snappass.set_password(secret, 30)
        response = self.app.post('/{0}'.format(token))
        # A POST reveals (and consumes) the secret.
        self.assertIn(secret, response.get_data(as_text=True))

    def test_url_prefix(self):
        secret = "I like novelty kitten statues!"
        snappass.URL_PREFIX = "/test/prefix"
        response = self.app.post('/', data={'password': secret, 'ttl': 'hour'})
        # The generated share link honours the configured URL prefix.
        self.assertIn("localhost/test/prefix/", response.get_data(as_text=True))

    def test_set_password(self):
        with freeze_time("2020-05-08 12:00:00") as clock:
            secret = 'my name is my passport. verify me.'
            response = self.app.post('/', data={'password': secret, 'ttl': 'two weeks'})
            page = response.data.decode("ascii")
            # Pull the share token out of the rendered HTML.
            token = re.search(r'id="password-link" value="https://localhost/([^"]+)', page).group(1)
            token = unquote(token)

            # One second before the two-week TTL elapses it still resolves.
            clock.move_to("2020-05-22 11:59:59")
            self.assertEqual(snappass.get_password(token), secret)

            # Exactly at expiry the secret is gone.
            clock.move_to("2020-05-22 12:00:00")
            self.assertIsNone(snappass.get_password(token))

    def test_set_password_json(self):
        with freeze_time("2020-05-08 12:00:00") as clock:
            secret = 'my name is my passport. verify me.'
            response = self.app.post(
                '/',
                headers={'Accept': 'application/json'},
                data={'password': secret, 'ttl': 'two weeks'},
            )
            payload = response.get_json()
            # Pull the share token out of the JSON link field.
            token = re.search(r'https://localhost/([^"]+)', payload['link']).group(1)
            token = unquote(token)

            clock.move_to("2020-05-22 11:59:59")
            self.assertEqual(snappass.get_password(token), secret)

            clock.move_to("2020-05-22 12:00:00")
            self.assertIsNone(snappass.get_password(token))
# Allow running this test module directly: python test_snappass.py
if __name__ == '__main__':
    unittest.main()
| 38.567073 | 106 | 0.655178 | import re
import time
import unittest
import uuid
from unittest import TestCase
from unittest import mock
from urllib.parse import unquote
from cryptography.fernet import Fernet
from freezegun import freeze_time
from werkzeug.exceptions import BadRequest
from fakeredis import FakeStrictRedis
import snappass.main as snappass
__author__ = 'davedash'
class SnapPassTestCase(TestCase):
    """Unit tests for snappass' storage and encryption helpers."""

    @mock.patch('redis.client.StrictRedis', FakeStrictRedis)
    def test_get_password(self):
        # A password can be read back exactly once; the second read is None.
        password = "melatonin overdose 1337!$"
        key = snappass.set_password(password, 30)
        self.assertEqual(password, snappass.get_password(key))
        self.assertIsNone(snappass.get_password(key))

    def test_password_is_not_stored_in_plaintext(self):
        # Redis must hold ciphertext only, never the raw password.
        password = "trustno1"
        token = snappass.set_password(password, 30)
        redis_key = token.split(snappass.TOKEN_SEPARATOR)[0]
        stored_password_text = snappass.redis_client.get(redis_key).decode('utf-8')
        self.assertNotIn(password, stored_password_text)

    def test_returned_token_format(self):
        # Tokens are "<redis key><separator><fernet key>"; both halves must
        # be well-formed.
        password = "trustsome1"
        token = snappass.set_password(password, 30)
        token_fragments = token.split(snappass.TOKEN_SEPARATOR)
        self.assertEqual(2, len(token_fragments))
        redis_key, encryption_key = token_fragments
        # 32 hex characters (uuid4().hex) plus the configured key prefix.
        self.assertEqual(32 + len(snappass.REDIS_PREFIX), len(redis_key))
        try:
            Fernet(encryption_key.encode('utf-8'))
        except ValueError:
            self.fail('the encryption key is not valid')

    def test_encryption_key_is_returned(self):
        # The key embedded in the token decrypts what is stored in Redis.
        password = "trustany1"
        token = snappass.set_password(password, 30)
        token_fragments = token.split(snappass.TOKEN_SEPARATOR)
        redis_key, encryption_key = token_fragments
        stored_password = snappass.redis_client.get(redis_key)
        fernet = Fernet(encryption_key.encode('utf-8'))
        decrypted_password = fernet.decrypt(stored_password).decode('utf-8')
        self.assertEqual(password, decrypted_password)

    def test_unencrypted_passwords_still_work(self):
        # Legacy values written without encryption remain readable.
        unencrypted_password = "trustevery1"
        storage_key = uuid.uuid4().hex
        snappass.redis_client.setex(storage_key, 30, unencrypted_password)
        retrieved_password = snappass.get_password(storage_key)
        self.assertEqual(unencrypted_password, retrieved_password)

    def test_password_is_decoded(self):
        # get_password() returns text, not bytes.
        password = "correct horse battery staple"
        key = snappass.set_password(password, 30)
        self.assertFalse(isinstance(snappass.get_password(key), bytes))

    def test_clean_input(self):
        # Unknown TTL value is rejected.
        with snappass.app.test_request_context(
                "/", data={'password': 'foo', 'ttl': 'bar'}, method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)
        # No password.
        with snappass.app.test_request_context(
                "/", method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)
        # No TTL.
        with snappass.app.test_request_context(
                "/", data={'password': 'foo'}, method='POST'):
            self.assertRaises(BadRequest, snappass.clean_input)
        # Valid input: the symbolic TTL is mapped to seconds.
        with snappass.app.test_request_context(
                "/", data={'password': 'foo', 'ttl': 'hour'}, method='POST'):
            self.assertEqual((3600, 'foo'), snappass.clean_input())

    def test_password_before_expiration(self):
        # Within the TTL the password is still readable.
        password = 'fidelio'
        key = snappass.set_password(password, 1)
        self.assertEqual(password, snappass.get_password(key))

    def test_password_after_expiration(self):
        # After the TTL elapses the password is gone.
        password = 'open sesame'
        key = snappass.set_password(password, 1)
        time.sleep(1.5)  # outlive the 1-second TTL
        self.assertIsNone(snappass.get_password(key))
class SnapPassRoutesTestCase(TestCase):
    """End-to-end tests of the Flask routes via the test client."""

    # noinspection PyPep8Naming
    def setUp(self):
        snappass.app.config['TESTING'] = True
        self.app = snappass.app.test_client()

    def test_preview_password(self):
        # GET renders a confirmation page without revealing the secret.
        password = "I like novelty kitten statues!"
        key = snappass.set_password(password, 30)
        rv = self.app.get('/{0}'.format(key))
        self.assertNotIn(password, rv.get_data(as_text=True))

    def test_show_password(self):
        # POST to the token URL reveals the stored secret.
        password = "I like novelty kitten statues!"
        key = snappass.set_password(password, 30)
        rv = self.app.post('/{0}'.format(key))
        self.assertIn(password, rv.get_data(as_text=True))

    def test_url_prefix(self):
        # A configured URL_PREFIX appears in the generated share link.
        password = "I like novelty kitten statues!"
        snappass.URL_PREFIX = "/test/prefix"
        rv = self.app.post('/', data={'password': password, 'ttl': 'hour'})
        self.assertIn("localhost/test/prefix/", rv.get_data(as_text=True))

    def test_set_password(self):
        # The HTML flow honours the 'two weeks' TTL to the second.
        with freeze_time("2020-05-08 12:00:00") as frozen_time:
            password = 'my name is my passport. verify me.'
            rv = self.app.post('/', data={'password': password, 'ttl': 'two weeks'})
            html_content = rv.data.decode("ascii")
            key = re.search(r'id="password-link" value="https://localhost/([^"]+)', html_content).group(1)
            key = unquote(key)
            # Still readable one second before expiry ...
            frozen_time.move_to("2020-05-22 11:59:59")
            self.assertEqual(snappass.get_password(key), password)
            # ... and gone exactly at expiry.
            frozen_time.move_to("2020-05-22 12:00:00")
            self.assertIsNone(snappass.get_password(key))

    def test_set_password_json(self):
        # The JSON flow honours the 'two weeks' TTL to the second.
        with freeze_time("2020-05-08 12:00:00") as frozen_time:
            password = 'my name is my passport. verify me.'
            rv = self.app.post(
                '/',
                headers={'Accept': 'application/json'},
                data={'password': password, 'ttl': 'two weeks'},
            )
            json_content = rv.get_json()
            key = re.search(r'https://localhost/([^"]+)', json_content['link']).group(1)
            key = unquote(key)
            frozen_time.move_to("2020-05-22 11:59:59")
            self.assertEqual(snappass.get_password(key), password)
            frozen_time.move_to("2020-05-22 12:00:00")
            self.assertIsNone(snappass.get_password(key))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| true | true |
1c3860194879062b51704087b47e5edb5d95420a | 11,828 | py | Python | src/probnum/randprocs/markov/integrator/_iwp.py | alpiges/probnum | 2e4153cb0df559984e09ec74487ef6c9d3f6d464 | [
"MIT"
] | null | null | null | src/probnum/randprocs/markov/integrator/_iwp.py | alpiges/probnum | 2e4153cb0df559984e09ec74487ef6c9d3f6d464 | [
"MIT"
] | 40 | 2021-04-12T07:56:29.000Z | 2022-03-28T00:18:18.000Z | src/probnum/randprocs/markov/integrator/_iwp.py | alpiges/probnum | 2e4153cb0df559984e09ec74487ef6c9d3f6d464 | [
"MIT"
] | null | null | null | """Integrated Brownian motion."""
try:
# cached_property is only available in Python >=3.8
from functools import cached_property
except ImportError:
from cached_property import cached_property
import warnings

import numpy as np
import scipy.linalg
import scipy.special

from probnum import config, linops, randvars
from probnum.randprocs.markov import _markov_process, continuous, discrete
from probnum.randprocs.markov.integrator import _integrator, _preconditioner
class IntegratedWienerProcess(_markov_process.MarkovProcess):
r"""Integrated Wiener process.
Convenience access to :math:`\nu` times integrated (:math:`d` dimensional) Wiener processes.
Parameters
----------
initarg
Initial time point.
num_derivatives
Number of modelled derivatives of the integrated process (''order'', ''number of integrations'').
Optional. Default is :math:`\nu=1`.
wiener_process_dimension
Dimension of the underlying Wiener process.
Optional. Default is :math:`d=1`.
The dimension of the integrated Wiener process itself is :math:`d(\nu + 1)`.
initrv
Law of the integrated Wiener process at the initial time point.
Optional. Default is a :math:`d(\nu + 1)` dimensional standard-normal distribution.
diffuse
Whether to instantiate a diffuse prior. A diffuse prior has large initial variances.
Optional. Default is `False`.
If `True`, and if an initial random variable is not passed, an initial random variable is created,
where the initial covariance is of the form :math:`\kappa I_{d(\nu + 1)}`
with :math:`\kappa=10^6`.
Diffuse priors are used when initial distributions are not known.
They are common for filtering-based probabilistic ODE solvers.
forward_implementation
Implementation of the forward-propagation in the underlying transitions.
Optional. Default is `classic`. `sqrt` implementation is more computationally expensive, but also more stable.
backward_implementation
Implementation of the backward-conditioning in the underlying transitions.
Optional. Default is `classic`. `sqrt` implementation is more computationally expensive, but also more stable.
Raises
------
Warning
If `initrv` is not None and `diffuse` is True.
Examples
--------
>>> iwp1 = IntegratedWienerProcess(initarg=0.)
>>> print(iwp1)
<IntegratedWienerProcess with input_dim=1, output_dim=2, dtype=float64>
>>> iwp2 = IntegratedWienerProcess(initarg=0., num_derivatives=2)
>>> print(iwp2)
<IntegratedWienerProcess with input_dim=1, output_dim=3, dtype=float64>
>>> iwp3 = IntegratedWienerProcess(initarg=0., wiener_process_dimension=10)
>>> print(iwp3)
<IntegratedWienerProcess with input_dim=1, output_dim=20, dtype=float64>
>>> iwp4 = IntegratedWienerProcess(initarg=0., num_derivatives=4, wiener_process_dimension=1)
>>> print(iwp4)
<IntegratedWienerProcess with input_dim=1, output_dim=5, dtype=float64>
"""
def __init__(
self,
initarg,
num_derivatives=1,
wiener_process_dimension=1,
initrv=None,
diffuse=False,
forward_implementation="classic",
backward_implementation="classic",
):
iwp_transition = IntegratedWienerTransition(
num_derivatives=num_derivatives,
wiener_process_dimension=wiener_process_dimension,
forward_implementation=forward_implementation,
backward_implementation=backward_implementation,
)
if initrv is not None and diffuse:
warnings.warn(
"Parameter `diffuse` has no effect, because an `initrv` has been provided."
)
if initrv is None:
if diffuse:
scale_cholesky = 1e3
else:
scale_cholesky = 1.0
zeros = np.zeros(iwp_transition.state_dimension)
cov_cholesky = scale_cholesky * np.eye(iwp_transition.state_dimension)
initrv = randvars.Normal(
mean=zeros, cov=cov_cholesky ** 2, cov_cholesky=cov_cholesky
)
super().__init__(transition=iwp_transition, initrv=initrv, initarg=initarg)
class IntegratedWienerTransition(_integrator.IntegratorTransition, continuous.LTISDE):
    """Integrated Brownian motion in :math:`d` dimensions.

    Linear time-invariant SDE transition of the ``num_derivatives``-times
    integrated Wiener process; the state stacks the process and its
    derivatives for each of the ``wiener_process_dimension`` independent
    Wiener process dimensions.
    """

    def __init__(
        self,
        num_derivatives,
        wiener_process_dimension,
        forward_implementation="classic",
        backward_implementation="classic",
    ):
        # Initialise BOTH superclasses' inits explicitly; the diamond cannot
        # be expressed with a single super() call here.
        _integrator.IntegratorTransition.__init__(
            self,
            num_derivatives=num_derivatives,
            wiener_process_dimension=wiener_process_dimension,
        )
        continuous.LTISDE.__init__(
            self,
            drift_matrix=self._drift_matrix,
            force_vector=self._force_vector,
            dispersion_matrix=self._dispersion_matrix,
            forward_implementation=forward_implementation,
            backward_implementation=backward_implementation,
        )

    def _kron_identity(self, matrix):
        """Expand ``matrix`` to all Wiener process dimensions, i.e. compute
        ``I_d (x) matrix`` (as a linear operator in matrix-free mode)."""
        if config.matrix_free:
            return linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.aslinop(matrix),
            )
        return np.kron(np.eye(self.wiener_process_dimension), matrix)

    @cached_property
    def _drift_matrix(self):
        # 1D drift: ones on the first superdiagonal (each state component is
        # the derivative of the one above it).
        drift_matrix_1d = np.diag(np.ones(self.num_derivatives), 1)
        if config.matrix_free:
            return linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.Matrix(A=drift_matrix_1d),
            )
        return np.kron(np.eye(self.wiener_process_dimension), drift_matrix_1d)

    @cached_property
    def _force_vector(self):
        # The integrated Wiener process has no deterministic forcing term.
        return np.zeros((self.wiener_process_dimension * (self.num_derivatives + 1)))

    @cached_property
    def _dispersion_matrix(self):
        dispersion_matrix_1d = np.zeros(self.num_derivatives + 1)
        dispersion_matrix_1d[-1] = 1.0  # Unit diffusion on the highest derivative
        if config.matrix_free:
            return linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.Matrix(A=dispersion_matrix_1d.reshape(-1, 1)),
            )
        return np.kron(np.eye(self.wiener_process_dimension), dispersion_matrix_1d).T

    @cached_property
    def equivalent_discretisation_preconditioned(self):
        """Discretised IN THE PRECONDITIONED SPACE.

        The preconditioned state transition is the flipped Pascal matrix.
        The preconditioned process noise covariance is the flipped Hilbert
        matrix. The shift is always zero.

        Reference: https://arxiv.org/abs/2012.10106
        """
        # The module-level import only pulls in scipy.special; import
        # scipy.linalg explicitly so scipy.linalg.pascal/hilbert are
        # guaranteed to be resolvable.
        import scipy.linalg

        state_transition_1d = np.flip(
            scipy.linalg.pascal(self.num_derivatives + 1, kind="lower", exact=False)
        )
        state_transition = self._kron_identity(state_transition_1d)

        process_noise_1d = np.flip(scipy.linalg.hilbert(self.num_derivatives + 1))
        process_noise = self._kron_identity(process_noise_1d)

        empty_shift = np.zeros(
            self.wiener_process_dimension * (self.num_derivatives + 1)
        )

        # The flipped Hilbert matrix is symmetric positive definite, so its
        # Cholesky factor always exists (even for non-sqrt implementations).
        process_noise_cholesky = self._kron_identity(
            np.linalg.cholesky(process_noise_1d)
        )

        return discrete.LTIGaussian(
            state_trans_mat=state_transition,
            shift_vec=empty_shift,
            proc_noise_cov_mat=process_noise,
            proc_noise_cov_cholesky=process_noise_cholesky,
            forward_implementation=self.forward_implementation,
            backward_implementation=self.backward_implementation,
        )

    def forward_rv(
        self,
        rv,
        t,
        dt=None,
        compute_gain=False,
        _diffusion=1.0,
        **kwargs,
    ):
        """Propagate ``rv`` over a step ``dt``: precondition, apply the
        step-size-independent discrete transition, then undo the
        preconditioning (also on the returned cross-covariance and gain)."""
        if dt is None:
            raise ValueError(
                "Continuous-time transitions require a time-increment ``dt``."
            )
        rv = _preconditioner.apply_precon(self.precon.inverse(dt), rv)
        rv, info = self.equivalent_discretisation_preconditioned.forward_rv(
            rv, t, compute_gain=compute_gain, _diffusion=_diffusion
        )

        # Map the auxiliary quantities back out of the preconditioned space.
        info["crosscov"] = self.precon(dt) @ info["crosscov"] @ self.precon(dt).T
        if "gain" in info:
            info["gain"] = self.precon(dt) @ info["gain"] @ self.precon.inverse(dt).T

        return _preconditioner.apply_precon(self.precon(dt), rv), info

    def backward_rv(
        self,
        rv_obtained,
        rv,
        rv_forwarded=None,
        gain=None,
        t=None,
        dt=None,
        _diffusion=1.0,
        **kwargs,
    ):
        """Condition backwards over a step ``dt`` in the preconditioned
        space; the optional forwarded rv and gain are mapped into and out of
        that space as well."""
        if dt is None:
            raise ValueError(
                "Continuous-time transitions require a time-increment ``dt``."
            )

        rv_obtained = _preconditioner.apply_precon(self.precon.inverse(dt), rv_obtained)
        rv = _preconditioner.apply_precon(self.precon.inverse(dt), rv)
        rv_forwarded = (
            _preconditioner.apply_precon(self.precon.inverse(dt), rv_forwarded)
            if rv_forwarded is not None
            else None
        )
        gain = (
            self.precon.inverse(dt) @ gain @ self.precon.inverse(dt).T
            if gain is not None
            else None
        )

        rv, info = self.equivalent_discretisation_preconditioned.backward_rv(
            rv_obtained=rv_obtained,
            rv=rv,
            rv_forwarded=rv_forwarded,
            gain=gain,
            t=t,
            _diffusion=_diffusion,
        )

        return _preconditioner.apply_precon(self.precon(dt), rv), info

    def discretise(self, dt):
        """Equivalent discretisation of the process.

        Overwrites matrix-fraction decomposition in the super-class. Only
        present for the user's convenience and to maintain a clean
        interface; not used by forward_rv / backward_rv.
        """
        state_trans_mat = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.state_trans_mat
            @ self.precon.inverse(dt)
        )
        proc_noise_cov_mat = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.proc_noise_cov_mat
            @ self.precon(dt).T
        )
        zero_shift = np.zeros(state_trans_mat.shape[0])

        # The Cholesky factor of the process noise covariance matrix of the
        # IBM always exists, even for non-square-root implementations.
        proc_noise_cov_cholesky = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.proc_noise_cov_cholesky
        )

        return discrete.LTIGaussian(
            state_trans_mat=state_trans_mat,
            shift_vec=zero_shift,
            proc_noise_cov_mat=proc_noise_cov_mat,
            proc_noise_cov_cholesky=proc_noise_cov_cholesky,
            forward_implementation=self.forward_implementation,
            # Fixed: was `self.forward_implementation` (copy-paste slip);
            # the backward implementation choice must be forwarded here.
            backward_implementation=self.backward_implementation,
        )
| 37.07837 | 118 | 0.644234 |
try:
from functools import cached_property
except ImportError:
from cached_property import cached_property
import warnings
import numpy as np
import scipy.special
from probnum import config, linops, randvars
from probnum.randprocs.markov import _markov_process, continuous, discrete
from probnum.randprocs.markov.integrator import _integrator, _preconditioner
class IntegratedWienerProcess(_markov_process.MarkovProcess):
    """Nu-times integrated, d-dimensional Wiener process prior.

    The state stacks the process and its first ``num_derivatives``
    derivatives, so the process output dimension is
    ``wiener_process_dimension * (num_derivatives + 1)``.
    """

    def __init__(
        self,
        initarg,
        num_derivatives=1,
        wiener_process_dimension=1,
        initrv=None,
        diffuse=False,
        forward_implementation="classic",
        backward_implementation="classic",
    ):
        iwp_transition = IntegratedWienerTransition(
            num_derivatives=num_derivatives,
            wiener_process_dimension=wiener_process_dimension,
            forward_implementation=forward_implementation,
            backward_implementation=backward_implementation,
        )
        # `diffuse` only matters when no initial random variable is given.
        if initrv is not None and diffuse:
            warnings.warn(
                "Parameter `diffuse` has no effect, because an `initrv` has been provided."
            )
        if initrv is None:
            # Diffuse priors use a large initial covariance (1e3 ** 2 on the
            # diagonal); otherwise the prior is a standard normal.
            if diffuse:
                scale_cholesky = 1e3
            else:
                scale_cholesky = 1.0
            zeros = np.zeros(iwp_transition.state_dimension)
            cov_cholesky = scale_cholesky * np.eye(iwp_transition.state_dimension)
            initrv = randvars.Normal(
                mean=zeros, cov=cov_cholesky ** 2, cov_cholesky=cov_cholesky
            )
        super().__init__(transition=iwp_transition, initrv=initrv, initarg=initarg)
class IntegratedWienerTransition(_integrator.IntegratorTransition, continuous.LTISDE):
    """Integrated Brownian motion in :math:`d` dimensions (LTI-SDE transition)."""

    def __init__(
        self,
        num_derivatives,
        wiener_process_dimension,
        forward_implementation="classic",
        backward_implementation="classic",
    ):
        # Initialise BOTH superclasses' inits explicitly; the diamond cannot
        # be expressed with a single super() call here.
        _integrator.IntegratorTransition.__init__(
            self,
            num_derivatives=num_derivatives,
            wiener_process_dimension=wiener_process_dimension,
        )
        continuous.LTISDE.__init__(
            self,
            drift_matrix=self._drift_matrix,
            force_vector=self._force_vector,
            dispersion_matrix=self._dispersion_matrix,
            forward_implementation=forward_implementation,
            backward_implementation=backward_implementation,
        )

    @cached_property
    def _drift_matrix(self):
        # 1D drift: ones on the first superdiagonal; the full drift is its
        # Kronecker product with the d-dimensional identity.
        drift_matrix_1d = np.diag(np.ones(self.num_derivatives), 1)
        if config.matrix_free:
            return linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.Matrix(A=drift_matrix_1d),
            )
        return np.kron(np.eye(self.wiener_process_dimension), drift_matrix_1d)

    @cached_property
    def _force_vector(self):
        # No deterministic forcing term.
        return np.zeros((self.wiener_process_dimension * (self.num_derivatives + 1)))

    @cached_property
    def _dispersion_matrix(self):
        dispersion_matrix_1d = np.zeros(self.num_derivatives + 1)
        dispersion_matrix_1d[-1] = 1.0  # unit diffusion on the highest derivative
        if config.matrix_free:
            return linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.Matrix(A=dispersion_matrix_1d.reshape(-1, 1)),
            )
        return np.kron(np.eye(self.wiener_process_dimension), dispersion_matrix_1d).T

    @cached_property
    def equivalent_discretisation_preconditioned(self):
        """Discretised transition IN THE PRECONDITIONED SPACE.

        The preconditioned state transition is the flipped Pascal matrix,
        the preconditioned process noise covariance the flipped Hilbert
        matrix; the shift is always zero.
        Reference: https://arxiv.org/abs/2012.10106

        NOTE(review): uses scipy.linalg although the module only imports
        scipy.special -- confirm scipy.linalg is loaded by a transitive
        import, or add an explicit `import scipy.linalg`.
        """
        state_transition_1d = np.flip(
            scipy.linalg.pascal(self.num_derivatives + 1, kind="lower", exact=False)
        )
        if config.matrix_free:
            state_transition = linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.aslinop(state_transition_1d),
            )
        else:
            state_transition = np.kron(
                np.eye(self.wiener_process_dimension), state_transition_1d
            )
        process_noise_1d = np.flip(scipy.linalg.hilbert(self.num_derivatives + 1))
        if config.matrix_free:
            process_noise = linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.aslinop(process_noise_1d),
            )
        else:
            process_noise = np.kron(
                np.eye(self.wiener_process_dimension), process_noise_1d
            )
        empty_shift = np.zeros(
            self.wiener_process_dimension * (self.num_derivatives + 1)
        )
        # The flipped Hilbert matrix is symmetric positive definite, so the
        # Cholesky factorisation always succeeds.
        process_noise_cholesky_1d = np.linalg.cholesky(process_noise_1d)
        if config.matrix_free:
            process_noise_cholesky = linops.Kronecker(
                A=linops.Identity(self.wiener_process_dimension),
                B=linops.aslinop(process_noise_cholesky_1d),
            )
        else:
            process_noise_cholesky = np.kron(
                np.eye(self.wiener_process_dimension), process_noise_cholesky_1d
            )

        return discrete.LTIGaussian(
            state_trans_mat=state_transition,
            shift_vec=empty_shift,
            proc_noise_cov_mat=process_noise,
            proc_noise_cov_cholesky=process_noise_cholesky,
            forward_implementation=self.forward_implementation,
            backward_implementation=self.backward_implementation,
        )

    def forward_rv(
        self,
        rv,
        t,
        dt=None,
        compute_gain=False,
        _diffusion=1.0,
        **kwargs,
    ):
        """Propagate ``rv`` over ``dt``: precondition, apply the
        step-size-independent discrete transition, undo the preconditioning
        (also on the returned cross-covariance and gain)."""
        if dt is None:
            raise ValueError(
                "Continuous-time transitions require a time-increment ``dt``."
            )
        rv = _preconditioner.apply_precon(self.precon.inverse(dt), rv)
        rv, info = self.equivalent_discretisation_preconditioned.forward_rv(
            rv, t, compute_gain=compute_gain, _diffusion=_diffusion
        )

        # Map the auxiliary quantities back out of the preconditioned space.
        info["crosscov"] = self.precon(dt) @ info["crosscov"] @ self.precon(dt).T
        if "gain" in info:
            info["gain"] = self.precon(dt) @ info["gain"] @ self.precon.inverse(dt).T

        return _preconditioner.apply_precon(self.precon(dt), rv), info

    def backward_rv(
        self,
        rv_obtained,
        rv,
        rv_forwarded=None,
        gain=None,
        t=None,
        dt=None,
        _diffusion=1.0,
        **kwargs,
    ):
        """Condition backwards over ``dt`` in the preconditioned space; the
        optional forwarded rv and gain are mapped into and out of that
        space as well."""
        if dt is None:
            raise ValueError(
                "Continuous-time transitions require a time-increment ``dt``."
            )

        rv_obtained = _preconditioner.apply_precon(self.precon.inverse(dt), rv_obtained)
        rv = _preconditioner.apply_precon(self.precon.inverse(dt), rv)
        rv_forwarded = (
            _preconditioner.apply_precon(self.precon.inverse(dt), rv_forwarded)
            if rv_forwarded is not None
            else None
        )
        gain = (
            self.precon.inverse(dt) @ gain @ self.precon.inverse(dt).T
            if gain is not None
            else None
        )

        rv, info = self.equivalent_discretisation_preconditioned.backward_rv(
            rv_obtained=rv_obtained,
            rv=rv,
            rv_forwarded=rv_forwarded,
            gain=gain,
            t=t,
            _diffusion=_diffusion,
        )

        return _preconditioner.apply_precon(self.precon(dt), rv), info

    def discretise(self, dt):
        """Equivalent discretisation of the process.

        Overwrites matrix-fraction decomposition in the super-class; only
        present for convenience and a clean interface, not used by
        forward_rv / backward_rv.
        """
        state_trans_mat = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.state_trans_mat
            @ self.precon.inverse(dt)
        )
        proc_noise_cov_mat = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.proc_noise_cov_mat
            @ self.precon(dt).T
        )
        zero_shift = np.zeros(state_trans_mat.shape[0])

        # The Cholesky factor of the IBM process noise covariance always
        # exists, even for non-square-root implementations.
        proc_noise_cov_cholesky = (
            self.precon(dt)
            @ self.equivalent_discretisation_preconditioned.proc_noise_cov_cholesky
        )

        return discrete.LTIGaussian(
            state_trans_mat=state_trans_mat,
            shift_vec=zero_shift,
            proc_noise_cov_mat=proc_noise_cov_mat,
            proc_noise_cov_cholesky=proc_noise_cov_cholesky,
            forward_implementation=self.forward_implementation,
            # NOTE(review): passes forward_implementation as the backward
            # implementation -- looks like a copy-paste slip; confirm.
            backward_implementation=self.forward_implementation,
        )
| true | true |
1c386043c55d45b1b88ea0a2646c4901ed16ed5a | 950 | py | Python | distribulator/engine/mode/Mode.py | adace74/test | b7b45fae6c4ba6ffe8a04c45b6e2bf23a918226b | [
"BSD-3-Clause"
] | null | null | null | distribulator/engine/mode/Mode.py | adace74/test | b7b45fae6c4ba6ffe8a04c45b6e2bf23a918226b | [
"BSD-3-Clause"
] | null | null | null | distribulator/engine/mode/Mode.py | adace74/test | b7b45fae6c4ba6ffe8a04c45b6e2bf23a918226b | [
"BSD-3-Clause"
] | null | null | null | ######################################################################
#
# $Id$
#
# (c) Copyright 2004 Orbitz, Inc. All Rights Reserved.
# Please see the accompanying LICENSE file for license information.
#
######################################################################
# Pydoc comments
"""This class is the base abstract class for all runtime modes."""
# Version tag
__version__= '$Revision$'[11:-2]
# Standard modules
# Custom modules
######################################################################
class Mode:
    """Abstract base class for all distribulator runtime modes.

    Concrete modes override invoke(); this base does nothing with the
    supplied global configuration.
    """

    def __init__(self, PassedGlobalConfig):
        """Constructor; the base class ignores the passed configuration."""
        pass

    ######################################################################

    def invoke(self):
        """Main entry point for the mode's custom logic; overridden by subclasses."""
        pass
######################################################################
| 25 | 76 | 0.413684 | true | true | |
1c38615b6e5a27005bd983f0f82fb8dfb4efac93 | 425 | py | Python | rest/notification/list-get-example-1/list-get-example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | 2 | 2017-11-23T11:31:20.000Z | 2018-01-22T04:14:02.000Z | rest/notification/list-get-example-1/list-get-example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | null | null | null | rest/notification/list-get-example-1/list-get-example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | 2 | 2020-05-22T23:31:21.000Z | 2021-06-10T18:33:45.000Z | # Download the Python helper library from twilio.com/docs/python/install
# Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import TwilioRestClient

# Your Account Sid and Auth Token from twilio.com/user/account
# NOTE(review): TwilioRestClient is the pre-6.x client class; newer helper
# library versions expose twilio.rest.Client instead -- confirm the pinned
# library version before upgrading.
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = TwilioRestClient(account_sid, auth_token)

# A list of notification objects with the properties described above
notifications = client.notifications.list()
| 38.636364 | 72 | 0.828235 |
from twilio.rest import TwilioRestClient

# Account SID and auth token, found at twilio.com/user/account.
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
client = TwilioRestClient(account_sid, auth_token)

# Retrieve all notification records for this account.
notifications = client.notifications.list()
| true | true |
1c38619eee36a6d8e0be87801ad8d87b80681fc2 | 6,617 | py | Python | assignment1/cs231n/classifiers/fc_net.py | qiaw99/CS231n-Convolutional-Neural-Networks-for-Visual-Recognition | 5949b4f68f04e23751879dbc679e7708735d313f | [
"Apache-2.0"
] | null | null | null | assignment1/cs231n/classifiers/fc_net.py | qiaw99/CS231n-Convolutional-Neural-Networks-for-Visual-Recognition | 5949b4f68f04e23751879dbc679e7708735d313f | [
"Apache-2.0"
] | null | null | null | assignment1/cs231n/classifiers/fc_net.py | qiaw99/CS231n-Convolutional-Neural-Networks-for-Visual-Recognition | 5949b4f68f04e23751879dbc679e7708735d313f | [
"Apache-2.0"
] | null | null | null | from builtins import range
from builtins import object
import numpy as np
from ..layers import *
from ..layer_utils import *
class TwoLayerNet(object):
    """Two-layer fully-connected network: affine - ReLU - affine - softmax.

    Input dimension D, hidden dimension H, classification over C classes.
    Learnable parameters live in the dict ``self.params`` under the keys
    'W1', 'b1', 'W2', 'b2'. The class does not implement gradient descent;
    it interacts with a separate Solver object that runs the optimisation.
    """

    def __init__(
        self,
        input_dim=3 * 32 * 32,
        hidden_dim=100,
        num_classes=10,
        weight_scale=1e-3,
        reg=0.0,
    ):
        """Initialise a new network.

        Inputs:
        - input_dim: Size of the input.
        - hidden_dim: Size of the hidden layer.
        - num_classes: Number of classes to classify.
        - weight_scale: Standard deviation for the Gaussian weight init.
        - reg: L2 regularization strength.
        """
        self.reg = reg
        # Weights ~ N(0, weight_scale^2); biases start at zero.
        self.params = {
            "W1": np.random.normal(0, weight_scale, size=(input_dim, hidden_dim)),
            "b1": np.zeros(hidden_dim),
            "W2": np.random.normal(0, weight_scale, size=(hidden_dim, num_classes)),
            "b2": np.zeros(num_classes),
        }

    def loss(self, X, y=None):
        """Compute loss and gradient for a minibatch of data.

        Inputs:
        - X: Input data of shape (N, d_1, ..., d_k).
        - y: Labels of shape (N,); y[i] is the label for X[i].

        Returns:
        If y is None (test mode): scores of shape (N, C).
        Otherwise (training mode): a tuple (loss, grads) where grads maps
        each key of self.params to the gradient of the loss w.r.t. it.
        """
        W1, b1 = self.params["W1"], self.params["b1"]
        W2, b2 = self.params["W2"], self.params["b2"]

        # Forward pass: affine-ReLU, then the final affine layer.
        hidden, hidden_cache = affine_relu_forward(X, W1, b1)
        scores, score_cache = affine_forward(hidden, W2, b2)

        # Test mode: only the class scores are needed.
        if y is None:
            return scores

        # Softmax data loss plus 0.5 * reg * ||W||^2 regularisation
        # (the 0.5 factor keeps the gradient expression simple).
        loss, dscores = softmax_loss(scores, y)
        loss += 0.5 * self.reg * (np.sum(W1 * W1) + np.sum(W2 * W2))

        # Backward pass through the second layer, then the first.
        grads = {}
        dhidden, grads["W2"], grads["b2"] = affine_backward(dscores, score_cache)
        _, grads["W1"], grads["b1"] = affine_relu_backward(dhidden, hidden_cache)

        # Regularisation gradient.
        grads["W1"] += self.reg * W1
        grads["W2"] += self.reg * W2

        return loss, grads
| 47.264286 | 143 | 0.484207 | from builtins import range
from builtins import object
import numpy as np
from ..layers import *
from ..layer_utils import *
class TwoLayerNet(object):
def __init__(
self,
input_dim=3 * 32 * 32,
hidden_dim=100,
num_classes=10,
weight_scale=1e-3,
reg=0.0,
):
self.params = {}
self.reg = reg
| true | true |
1c3861b42892e0760a2b57f037101b3f9ff8dd62 | 11,482 | py | Python | pcdet/models/roi_heads/voxelrcnn_head.py | EmiyaNing/OpenPCDet | 41ff28209cb000b51626a0ed8593b0adbe3dd447 | [
"Apache-2.0"
] | null | null | null | pcdet/models/roi_heads/voxelrcnn_head.py | EmiyaNing/OpenPCDet | 41ff28209cb000b51626a0ed8593b0adbe3dd447 | [
"Apache-2.0"
] | null | null | null | pcdet/models/roi_heads/voxelrcnn_head.py | EmiyaNing/OpenPCDet | 41ff28209cb000b51626a0ed8593b0adbe3dd447 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
from ...ops.pointnet2.pointnet2_stack import voxel_pool_modules as voxelpool_stack_modules
from ...utils import common_utils
from .roi_head_template import RoIHeadTemplate
class VoxelRCNNHead(RoIHeadTemplate):
def __init__(self, backbone_channels, model_cfg, point_cloud_range, voxel_size, num_class=1, **kwargs):
super().__init__(num_class=num_class, model_cfg=model_cfg)
self.model_cfg = model_cfg
self.pool_cfg = model_cfg.ROI_GRID_POOL
LAYER_cfg = self.pool_cfg.POOL_LAYERS
self.point_cloud_range = point_cloud_range
self.voxel_size = voxel_size
c_out = 0
self.roi_grid_pool_layers = nn.ModuleList()
for src_name in self.pool_cfg.FEATURES_SOURCE:
mlps = LAYER_cfg[src_name].MLPS
for k in range(len(mlps)):
mlps[k] = [backbone_channels[src_name]] + mlps[k]
pool_layer = voxelpool_stack_modules.NeighborVoxelSAModuleMSG(
query_ranges=LAYER_cfg[src_name].QUERY_RANGES,
nsamples=LAYER_cfg[src_name].NSAMPLE,
radii=LAYER_cfg[src_name].POOL_RADIUS,
mlps=mlps,
pool_method=LAYER_cfg[src_name].POOL_METHOD,
)
self.roi_grid_pool_layers.append(pool_layer)
c_out += sum([x[-1] for x in mlps])
GRID_SIZE = self.model_cfg.ROI_GRID_POOL.GRID_SIZE
# c_out = sum([x[-1] for x in mlps])
pre_channel = GRID_SIZE * GRID_SIZE * GRID_SIZE * c_out
shared_fc_list = []
for k in range(0, self.model_cfg.SHARED_FC.__len__()):
shared_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.SHARED_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.SHARED_FC[k]),
nn.ReLU(inplace=True)
])
pre_channel = self.model_cfg.SHARED_FC[k]
if k != self.model_cfg.SHARED_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
shared_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.shared_fc_layer = nn.Sequential(*shared_fc_list)
cls_fc_list = []
for k in range(0, self.model_cfg.CLS_FC.__len__()):
cls_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.CLS_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.CLS_FC[k]),
nn.ReLU()
])
pre_channel = self.model_cfg.CLS_FC[k]
if k != self.model_cfg.CLS_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
cls_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.cls_fc_layers = nn.Sequential(*cls_fc_list)
self.cls_pred_layer = nn.Linear(pre_channel, self.num_class, bias=True)
reg_fc_list = []
for k in range(0, self.model_cfg.REG_FC.__len__()):
reg_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.REG_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.REG_FC[k]),
nn.ReLU()
])
pre_channel = self.model_cfg.REG_FC[k]
if k != self.model_cfg.REG_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
reg_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.reg_fc_layers = nn.Sequential(*reg_fc_list)
self.reg_pred_layer = nn.Linear(pre_channel, self.box_coder.code_size * self.num_class, bias=True)
self.init_weights()
def init_weights(self):
init_func = nn.init.xavier_normal_
for module_list in [self.shared_fc_layer, self.cls_fc_layers, self.reg_fc_layers]:
for m in module_list.modules():
if isinstance(m, nn.Linear):
init_func(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
nn.init.normal_(self.cls_pred_layer.weight, 0, 0.01)
nn.init.constant_(self.cls_pred_layer.bias, 0)
nn.init.normal_(self.reg_pred_layer.weight, mean=0, std=0.001)
nn.init.constant_(self.reg_pred_layer.bias, 0)
# def _init_weights(self):
# init_func = nn.init.xavier_normal_
# for m in self.modules():
# if isinstance(m, nn.Conv2d) or isinstance(m, nn.Conv1d) or isinstance(m, nn.Linear):
# init_func(m.weight)
# if m.bias is not None:
# nn.init.constant_(m.bias, 0)
# nn.init.normal_(self.reg_layers[-1].weight, mean=0, std=0.001)
def roi_grid_pool(self, batch_dict):
"""
Args:
batch_dict:
batch_size:
rois: (B, num_rois, 7 + C)
point_coords: (num_points, 4) [bs_idx, x, y, z]
point_features: (num_points, C)
point_cls_scores: (N1 + N2 + N3 + ..., 1)
point_part_offset: (N1 + N2 + N3 + ..., 3)
Returns:
"""
rois = batch_dict['rois']
batch_size = batch_dict['batch_size']
with_vf_transform = batch_dict.get('with_voxel_feature_transform', False)
roi_grid_xyz, _ = self.get_global_grid_points_of_roi(
rois, grid_size=self.pool_cfg.GRID_SIZE
) # (BxN, 6x6x6, 3)
# roi_grid_xyz: (B, Nx6x6x6, 3)
roi_grid_xyz = roi_grid_xyz.view(batch_size, -1, 3)
# compute the voxel coordinates of grid points
roi_grid_coords_x = (roi_grid_xyz[:, :, 0:1] - self.point_cloud_range[0]) // self.voxel_size[0]
roi_grid_coords_y = (roi_grid_xyz[:, :, 1:2] - self.point_cloud_range[1]) // self.voxel_size[1]
roi_grid_coords_z = (roi_grid_xyz[:, :, 2:3] - self.point_cloud_range[2]) // self.voxel_size[2]
# roi_grid_coords: (B, Nx6x6x6, 3)
roi_grid_coords = torch.cat([roi_grid_coords_x, roi_grid_coords_y, roi_grid_coords_z], dim=-1)
batch_idx = rois.new_zeros(batch_size, roi_grid_coords.shape[1], 1)
for bs_idx in range(batch_size):
batch_idx[bs_idx, :, 0] = bs_idx
# roi_grid_coords: (B, Nx6x6x6, 4)
# roi_grid_coords = torch.cat([batch_idx, roi_grid_coords], dim=-1)
# roi_grid_coords = roi_grid_coords.int()
roi_grid_batch_cnt = rois.new_zeros(batch_size).int().fill_(roi_grid_coords.shape[1])
pooled_features_list = []
for k, src_name in enumerate(self.pool_cfg.FEATURES_SOURCE):
pool_layer = self.roi_grid_pool_layers[k]
cur_stride = batch_dict['multi_scale_3d_strides'][src_name]
cur_sp_tensors = batch_dict['multi_scale_3d_features'][src_name]
if with_vf_transform:
cur_sp_tensors = batch_dict['multi_scale_3d_features_post'][src_name]
else:
cur_sp_tensors = batch_dict['multi_scale_3d_features'][src_name]
# compute voxel center xyz and batch_cnt
cur_coords = cur_sp_tensors.indices
cur_voxel_xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4],
downsample_times=cur_stride,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
cur_voxel_xyz_batch_cnt = cur_voxel_xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
cur_voxel_xyz_batch_cnt[bs_idx] = (cur_coords[:, 0] == bs_idx).sum()
# get voxel2point tensor
v2p_ind_tensor = common_utils.generate_voxel2pinds(cur_sp_tensors)
# compute the grid coordinates in this scale, in [batch_idx, x y z] order
cur_roi_grid_coords = roi_grid_coords // cur_stride
cur_roi_grid_coords = torch.cat([batch_idx, cur_roi_grid_coords], dim=-1)
cur_roi_grid_coords = cur_roi_grid_coords.int()
# voxel neighbor aggregation
pooled_features = pool_layer(
xyz=cur_voxel_xyz.contiguous(),
xyz_batch_cnt=cur_voxel_xyz_batch_cnt,
new_xyz=roi_grid_xyz.contiguous().view(-1, 3),
new_xyz_batch_cnt=roi_grid_batch_cnt,
new_coords=cur_roi_grid_coords.contiguous().view(-1, 4),
features=cur_sp_tensors.features.contiguous(),
voxel2point_indices=v2p_ind_tensor
)
pooled_features = pooled_features.view(
-1, self.pool_cfg.GRID_SIZE ** 3,
pooled_features.shape[-1]
) # (BxN, 6x6x6, C)
pooled_features_list.append(pooled_features)
ms_pooled_features = torch.cat(pooled_features_list, dim=-1)
return ms_pooled_features
def get_global_grid_points_of_roi(self, rois, grid_size):
rois = rois.view(-1, rois.shape[-1])
batch_size_rcnn = rois.shape[0]
local_roi_grid_points = self.get_dense_grid_points(rois, batch_size_rcnn, grid_size) # (B, 6x6x6, 3)
global_roi_grid_points = common_utils.rotate_points_along_z(
local_roi_grid_points.clone(), rois[:, 6]
).squeeze(dim=1)
global_center = rois[:, 0:3].clone()
global_roi_grid_points += global_center.unsqueeze(dim=1)
return global_roi_grid_points, local_roi_grid_points
@staticmethod
def get_dense_grid_points(rois, batch_size_rcnn, grid_size):
faked_features = rois.new_ones((grid_size, grid_size, grid_size))
dense_idx = faked_features.nonzero() # (N, 3) [x_idx, y_idx, z_idx]
dense_idx = dense_idx.repeat(batch_size_rcnn, 1, 1).float() # (B, 6x6x6, 3)
local_roi_size = rois.view(batch_size_rcnn, -1)[:, 3:6]
roi_grid_points = (dense_idx + 0.5) / grid_size * local_roi_size.unsqueeze(dim=1) \
- (local_roi_size.unsqueeze(dim=1) / 2) # (B, 6x6x6, 3)
return roi_grid_points
def forward(self, batch_dict):
"""
:param input_data: input dict
:return:
"""
targets_dict = self.proposal_layer(
batch_dict, nms_config=self.model_cfg.NMS_CONFIG['TRAIN' if self.training else 'TEST']
)
if self.training:
targets_dict = self.assign_targets(batch_dict)
batch_dict['rois'] = targets_dict['rois']
batch_dict['roi_labels'] = targets_dict['roi_labels']
# RoI aware pooling
pooled_features = self.roi_grid_pool(batch_dict) # (BxN, 6x6x6, C)
# Box Refinement
pooled_features = pooled_features.view(pooled_features.size(0), -1)
shared_features = self.shared_fc_layer(pooled_features)
rcnn_cls = self.cls_pred_layer(self.cls_fc_layers(shared_features))
rcnn_reg = self.reg_pred_layer(self.reg_fc_layers(shared_features))
if not self.training:
batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
batch_size=batch_dict['batch_size'], rois=batch_dict['rois'], cls_preds=rcnn_cls, box_preds=rcnn_reg
)
batch_dict['batch_cls_preds'] = batch_cls_preds
batch_dict['batch_box_preds'] = batch_box_preds
batch_dict['cls_preds_normalized'] = False
else:
targets_dict['rcnn_cls'] = rcnn_cls
targets_dict['rcnn_reg'] = rcnn_reg
batch_dict['rcnn_cls'] = rcnn_cls
batch_dict['rcnn_reg'] = rcnn_reg
self.forward_ret_dict = targets_dict
return batch_dict | 45.027451 | 116 | 0.620536 | import torch
import torch.nn as nn
from ...ops.pointnet2.pointnet2_stack import voxel_pool_modules as voxelpool_stack_modules
from ...utils import common_utils
from .roi_head_template import RoIHeadTemplate
class VoxelRCNNHead(RoIHeadTemplate):
def __init__(self, backbone_channels, model_cfg, point_cloud_range, voxel_size, num_class=1, **kwargs):
super().__init__(num_class=num_class, model_cfg=model_cfg)
self.model_cfg = model_cfg
self.pool_cfg = model_cfg.ROI_GRID_POOL
LAYER_cfg = self.pool_cfg.POOL_LAYERS
self.point_cloud_range = point_cloud_range
self.voxel_size = voxel_size
c_out = 0
self.roi_grid_pool_layers = nn.ModuleList()
for src_name in self.pool_cfg.FEATURES_SOURCE:
mlps = LAYER_cfg[src_name].MLPS
for k in range(len(mlps)):
mlps[k] = [backbone_channels[src_name]] + mlps[k]
pool_layer = voxelpool_stack_modules.NeighborVoxelSAModuleMSG(
query_ranges=LAYER_cfg[src_name].QUERY_RANGES,
nsamples=LAYER_cfg[src_name].NSAMPLE,
radii=LAYER_cfg[src_name].POOL_RADIUS,
mlps=mlps,
pool_method=LAYER_cfg[src_name].POOL_METHOD,
)
self.roi_grid_pool_layers.append(pool_layer)
c_out += sum([x[-1] for x in mlps])
GRID_SIZE = self.model_cfg.ROI_GRID_POOL.GRID_SIZE
pre_channel = GRID_SIZE * GRID_SIZE * GRID_SIZE * c_out
shared_fc_list = []
for k in range(0, self.model_cfg.SHARED_FC.__len__()):
shared_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.SHARED_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.SHARED_FC[k]),
nn.ReLU(inplace=True)
])
pre_channel = self.model_cfg.SHARED_FC[k]
if k != self.model_cfg.SHARED_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
shared_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.shared_fc_layer = nn.Sequential(*shared_fc_list)
cls_fc_list = []
for k in range(0, self.model_cfg.CLS_FC.__len__()):
cls_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.CLS_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.CLS_FC[k]),
nn.ReLU()
])
pre_channel = self.model_cfg.CLS_FC[k]
if k != self.model_cfg.CLS_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
cls_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.cls_fc_layers = nn.Sequential(*cls_fc_list)
self.cls_pred_layer = nn.Linear(pre_channel, self.num_class, bias=True)
reg_fc_list = []
for k in range(0, self.model_cfg.REG_FC.__len__()):
reg_fc_list.extend([
nn.Linear(pre_channel, self.model_cfg.REG_FC[k], bias=False),
nn.BatchNorm1d(self.model_cfg.REG_FC[k]),
nn.ReLU()
])
pre_channel = self.model_cfg.REG_FC[k]
if k != self.model_cfg.REG_FC.__len__() - 1 and self.model_cfg.DP_RATIO > 0:
reg_fc_list.append(nn.Dropout(self.model_cfg.DP_RATIO))
self.reg_fc_layers = nn.Sequential(*reg_fc_list)
self.reg_pred_layer = nn.Linear(pre_channel, self.box_coder.code_size * self.num_class, bias=True)
self.init_weights()
def init_weights(self):
init_func = nn.init.xavier_normal_
for module_list in [self.shared_fc_layer, self.cls_fc_layers, self.reg_fc_layers]:
for m in module_list.modules():
if isinstance(m, nn.Linear):
init_func(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
nn.init.normal_(self.cls_pred_layer.weight, 0, 0.01)
nn.init.constant_(self.cls_pred_layer.bias, 0)
nn.init.normal_(self.reg_pred_layer.weight, mean=0, std=0.001)
nn.init.constant_(self.reg_pred_layer.bias, 0)
def roi_grid_pool(self, batch_dict):
rois = batch_dict['rois']
batch_size = batch_dict['batch_size']
with_vf_transform = batch_dict.get('with_voxel_feature_transform', False)
roi_grid_xyz, _ = self.get_global_grid_points_of_roi(
rois, grid_size=self.pool_cfg.GRID_SIZE
)
roi_grid_xyz = roi_grid_xyz.view(batch_size, -1, 3)
roi_grid_coords_x = (roi_grid_xyz[:, :, 0:1] - self.point_cloud_range[0]) // self.voxel_size[0]
roi_grid_coords_y = (roi_grid_xyz[:, :, 1:2] - self.point_cloud_range[1]) // self.voxel_size[1]
roi_grid_coords_z = (roi_grid_xyz[:, :, 2:3] - self.point_cloud_range[2]) // self.voxel_size[2]
roi_grid_coords = torch.cat([roi_grid_coords_x, roi_grid_coords_y, roi_grid_coords_z], dim=-1)
batch_idx = rois.new_zeros(batch_size, roi_grid_coords.shape[1], 1)
for bs_idx in range(batch_size):
batch_idx[bs_idx, :, 0] = bs_idx
roi_grid_batch_cnt = rois.new_zeros(batch_size).int().fill_(roi_grid_coords.shape[1])
pooled_features_list = []
for k, src_name in enumerate(self.pool_cfg.FEATURES_SOURCE):
pool_layer = self.roi_grid_pool_layers[k]
cur_stride = batch_dict['multi_scale_3d_strides'][src_name]
cur_sp_tensors = batch_dict['multi_scale_3d_features'][src_name]
if with_vf_transform:
cur_sp_tensors = batch_dict['multi_scale_3d_features_post'][src_name]
else:
cur_sp_tensors = batch_dict['multi_scale_3d_features'][src_name]
cur_coords = cur_sp_tensors.indices
cur_voxel_xyz = common_utils.get_voxel_centers(
cur_coords[:, 1:4],
downsample_times=cur_stride,
voxel_size=self.voxel_size,
point_cloud_range=self.point_cloud_range
)
cur_voxel_xyz_batch_cnt = cur_voxel_xyz.new_zeros(batch_size).int()
for bs_idx in range(batch_size):
cur_voxel_xyz_batch_cnt[bs_idx] = (cur_coords[:, 0] == bs_idx).sum()
v2p_ind_tensor = common_utils.generate_voxel2pinds(cur_sp_tensors)
cur_roi_grid_coords = roi_grid_coords // cur_stride
cur_roi_grid_coords = torch.cat([batch_idx, cur_roi_grid_coords], dim=-1)
cur_roi_grid_coords = cur_roi_grid_coords.int()
pooled_features = pool_layer(
xyz=cur_voxel_xyz.contiguous(),
xyz_batch_cnt=cur_voxel_xyz_batch_cnt,
new_xyz=roi_grid_xyz.contiguous().view(-1, 3),
new_xyz_batch_cnt=roi_grid_batch_cnt,
new_coords=cur_roi_grid_coords.contiguous().view(-1, 4),
features=cur_sp_tensors.features.contiguous(),
voxel2point_indices=v2p_ind_tensor
)
pooled_features = pooled_features.view(
-1, self.pool_cfg.GRID_SIZE ** 3,
pooled_features.shape[-1]
)
pooled_features_list.append(pooled_features)
ms_pooled_features = torch.cat(pooled_features_list, dim=-1)
return ms_pooled_features
def get_global_grid_points_of_roi(self, rois, grid_size):
rois = rois.view(-1, rois.shape[-1])
batch_size_rcnn = rois.shape[0]
local_roi_grid_points = self.get_dense_grid_points(rois, batch_size_rcnn, grid_size)
global_roi_grid_points = common_utils.rotate_points_along_z(
local_roi_grid_points.clone(), rois[:, 6]
).squeeze(dim=1)
global_center = rois[:, 0:3].clone()
global_roi_grid_points += global_center.unsqueeze(dim=1)
return global_roi_grid_points, local_roi_grid_points
@staticmethod
def get_dense_grid_points(rois, batch_size_rcnn, grid_size):
faked_features = rois.new_ones((grid_size, grid_size, grid_size))
dense_idx = faked_features.nonzero()
dense_idx = dense_idx.repeat(batch_size_rcnn, 1, 1).float()
local_roi_size = rois.view(batch_size_rcnn, -1)[:, 3:6]
roi_grid_points = (dense_idx + 0.5) / grid_size * local_roi_size.unsqueeze(dim=1) \
- (local_roi_size.unsqueeze(dim=1) / 2)
return roi_grid_points
def forward(self, batch_dict):
targets_dict = self.proposal_layer(
batch_dict, nms_config=self.model_cfg.NMS_CONFIG['TRAIN' if self.training else 'TEST']
)
if self.training:
targets_dict = self.assign_targets(batch_dict)
batch_dict['rois'] = targets_dict['rois']
batch_dict['roi_labels'] = targets_dict['roi_labels']
pooled_features = self.roi_grid_pool(batch_dict)
pooled_features = pooled_features.view(pooled_features.size(0), -1)
shared_features = self.shared_fc_layer(pooled_features)
rcnn_cls = self.cls_pred_layer(self.cls_fc_layers(shared_features))
rcnn_reg = self.reg_pred_layer(self.reg_fc_layers(shared_features))
if not self.training:
batch_cls_preds, batch_box_preds = self.generate_predicted_boxes(
batch_size=batch_dict['batch_size'], rois=batch_dict['rois'], cls_preds=rcnn_cls, box_preds=rcnn_reg
)
batch_dict['batch_cls_preds'] = batch_cls_preds
batch_dict['batch_box_preds'] = batch_box_preds
batch_dict['cls_preds_normalized'] = False
else:
targets_dict['rcnn_cls'] = rcnn_cls
targets_dict['rcnn_reg'] = rcnn_reg
batch_dict['rcnn_cls'] = rcnn_cls
batch_dict['rcnn_reg'] = rcnn_reg
self.forward_ret_dict = targets_dict
return batch_dict | true | true |
1c3861f9352bd1c49f29f5b231e79694c4ccbaae | 1,148 | py | Python | setup.py | anoadragon453/matrix-nio | e157c5fed96d75797bbb0fadaa0c473fe11b5e6a | [
"Apache-2.0"
] | null | null | null | setup.py | anoadragon453/matrix-nio | e157c5fed96d75797bbb0fadaa0c473fe11b5e6a | [
"Apache-2.0"
] | null | null | null | setup.py | anoadragon453/matrix-nio | e157c5fed96d75797bbb0fadaa0c473fe11b5e6a | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from os import path
from io import open
from setuptools import find_packages, setup
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name="matrix-nio",
version="0.5",
url="https://github.com/poljar/matrix-nio",
author='Damir Jelić',
author_email="poljar@termina.org.uk",
description=("A Python Matrix client library, designed according to sans "
"I/O principles."),
long_description=long_description,
long_description_content_type="text/markdown",
license="ISC",
packages=find_packages(),
install_requires=[
"attrs",
"future",
"aiohttp;python_version>'3.5'",
"typing;python_version<'3.5'",
"h11",
"h2",
"logbook",
"jsonschema",
"unpaddedbase64",
"pycryptodome",
],
extras_require={
"e2e": [
"python-olm>=3.1.0",
"peewee>=3.9.5",
"cachetools",
"atomicwrites",
]
},
zip_safe=False
)
| 25.511111 | 78 | 0.585366 |
from os import path
from io import open
from setuptools import find_packages, setup
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name="matrix-nio",
version="0.5",
url="https://github.com/poljar/matrix-nio",
author='Damir Jelić',
author_email="poljar@termina.org.uk",
description=("A Python Matrix client library, designed according to sans "
"I/O principles."),
long_description=long_description,
long_description_content_type="text/markdown",
license="ISC",
packages=find_packages(),
install_requires=[
"attrs",
"future",
"aiohttp;python_version>'3.5'",
"typing;python_version<'3.5'",
"h11",
"h2",
"logbook",
"jsonschema",
"unpaddedbase64",
"pycryptodome",
],
extras_require={
"e2e": [
"python-olm>=3.1.0",
"peewee>=3.9.5",
"cachetools",
"atomicwrites",
]
},
zip_safe=False
)
| true | true |
1c386306cbf996ad6e0e5a309be4b1867d2be2d2 | 127 | py | Python | iot_kernel/__main__.py | iot49/iot-kernel | 5eb3e32a7e63b6ce5297cbab3df48d50d45cad50 | [
"MIT"
] | 2 | 2021-07-01T12:32:43.000Z | 2021-12-16T22:21:20.000Z | iot_kernel/__main__.py | iot49/iot-kernel | 5eb3e32a7e63b6ce5297cbab3df48d50d45cad50 | [
"MIT"
] | null | null | null | iot_kernel/__main__.py | iot49/iot-kernel | 5eb3e32a7e63b6ce5297cbab3df48d50d45cad50 | [
"MIT"
] | null | null | null | from ipykernel.kernelapp import IPKernelApp
from .kernel import IoTKernel
IPKernelApp.launch_instance(kernel_class=IoTKernel)
| 25.4 | 51 | 0.874016 | from ipykernel.kernelapp import IPKernelApp
from .kernel import IoTKernel
IPKernelApp.launch_instance(kernel_class=IoTKernel)
| true | true |
1c3864289d1471dfe5e1c5439cfcea90e3d1c2a2 | 1,438 | py | Python | tests/test_filename_checking.py | rcbops/flake8-filename | 1bfb0f6174e11bf6d3750aecddcb8e0216ad645b | [
"Apache-2.0"
] | null | null | null | tests/test_filename_checking.py | rcbops/flake8-filename | 1bfb0f6174e11bf6d3750aecddcb8e0216ad645b | [
"Apache-2.0"
] | 4 | 2018-05-30T14:31:43.000Z | 2020-08-10T08:23:28.000Z | tests/test_filename_checking.py | rcbops/flake8-filename | 1bfb0f6174e11bf6d3750aecddcb8e0216ad645b | [
"Apache-2.0"
] | 4 | 2018-05-25T15:21:38.000Z | 2020-02-22T13:04:24.000Z | # -*- coding: utf-8 -*-
import pytest
# args to only use checks that raise an 'N' prefixed error
extra_args = ['--select', 'N']
config = r"""
[flake8]
filename_check1 = filter_regex=test_.+
filename_regex=test_[\w-]+$
"""
def test_pass_filter_and_match_filename(flake8dir):
"""Verify that no violations are raised when a file passes the filter and matches the desired filename."""
# Setup
flake8dir.make_setup_cfg(config)
flake8dir.make_file('test_File-10.py', 'import sys')
# Test
result = flake8dir.run_flake8(extra_args)
assert result.out_lines == []
def test_fail_filter(flake8dir):
"""Verify that no violations are raised when a file fails the filter."""
# Setup
flake8dir.make_setup_cfg(config)
flake8dir.make_file('regular_file.py', 'import sys')
# Test
result = flake8dir.run_flake8(extra_args)
assert result.out_lines == []
def test_pass_filter_and_fail_match(flake8dir):
"""Verify that a violation is raised when a file passes the filter and fails to match the desired filename."""
# Setup
flake8dir.make_setup_cfg(config)
flake8dir.make_file('test_not.allowed.py', 'import sys')
expected = ["./test_not.allowed.py:0:1: N501 filename failed regex validation 'test_[\\w-]+$'"]
# Test
result = flake8dir.run_flake8(extra_args)
observed = result.out_lines
pytest.helpers.assert_lines(expected, observed)
| 27.132075 | 114 | 0.698192 |
import pytest
extra_args = ['--select', 'N']
config = r"""
[flake8]
filename_check1 = filter_regex=test_.+
filename_regex=test_[\w-]+$
"""
def test_pass_filter_and_match_filename(flake8dir):
flake8dir.make_setup_cfg(config)
flake8dir.make_file('test_File-10.py', 'import sys')
result = flake8dir.run_flake8(extra_args)
assert result.out_lines == []
def test_fail_filter(flake8dir):
flake8dir.make_setup_cfg(config)
flake8dir.make_file('regular_file.py', 'import sys')
result = flake8dir.run_flake8(extra_args)
assert result.out_lines == []
def test_pass_filter_and_fail_match(flake8dir):
flake8dir.make_setup_cfg(config)
flake8dir.make_file('test_not.allowed.py', 'import sys')
expected = ["./test_not.allowed.py:0:1: N501 filename failed regex validation 'test_[\\w-]+$'"]
result = flake8dir.run_flake8(extra_args)
observed = result.out_lines
pytest.helpers.assert_lines(expected, observed)
| true | true |
1c38649c2ee470c2fd8143faecb7a0b42c0a9261 | 3,292 | py | Python | targets/waxwing/base.py | timvideos/litex-buildenv | a103774342c0265458979a79082b233d9ce08edb | [
"BSD-2-Clause"
] | 198 | 2018-01-17T05:39:54.000Z | 2022-03-15T08:59:16.000Z | targets/waxwing/base.py | timvideos/litex-buildenv | a103774342c0265458979a79082b233d9ce08edb | [
"BSD-2-Clause"
] | 610 | 2017-12-31T01:32:32.000Z | 2022-03-19T22:07:28.000Z | targets/waxwing/base.py | timvideos/litex-buildenv | a103774342c0265458979a79082b233d9ce08edb | [
"BSD-2-Clause"
] | 85 | 2018-01-13T05:51:38.000Z | 2022-02-11T18:54:14.000Z | # Support for the Numato Saturn (http://numato.com/product/saturn-spartan-6-fpga-development-board-with-ddr-sdram)
from fractions import Fraction
from migen import *
from litex.soc.integration.soc_sdram import *
from litex.soc.integration.builder import *
from litex.soc.interconnect import wishbone
from litedram.modules import MT46H32M16
from litedram.phy import s6ddrphy
from litedram.core import ControllerSettings
from targets.utils import dict_set_max
#from gateware import cas
from gateware import info
from gateware import spi_flash
from .crg import _CRG
class BaseSoC(SoCSDRAM):
mem_map = {**SoCSDRAM.mem_map, **{
'spiflash': 0x20000000,
}}
def __init__(self, platform, **kwargs):
if kwargs.get('cpu_type', None) == 'mor1kx':
dict_set_max(kwargs, 'integrated_rom_size', 0x10000)
else:
dict_set_max(kwargs, 'integrated_rom_size', 0x8000)
dict_set_max(kwargs, 'integrated_sram_size', 0x8000)
sys_clk_freq = (31 + Fraction(1, 4))*1000*1000
# SoCSDRAM ---------------------------------------------------------------------------------
SoCSDRAM.__init__(self, platform, clk_freq=sys_clk_freq, **kwargs)
# CRG --------------------------------------------------------------------------------------
self.submodules.crg = _CRG(platform, sys_clk_freq)
self.platform.add_period_constraint(self.crg.cd_sys.clk, 1e9/sys_clk_freq)
# DDR2 SDRAM -------------------------------------------------------------------------------
if True:
sdram_module = MT46H32M16(sys_clk_freq, "1:2")
self.submodules.ddrphy = s6ddrphy.S6HalfRateDDRPHY(
platform.request("ddram"),
memtype = sdram_module.memtype,
rd_bitslip = 2,
wr_bitslip = 3,
dqs_ddr_alignment="C1")
self.add_csr("ddrphy")
controller_settings = ControllerSettings(
with_bandwidth=True)
self.register_sdram(
self.ddrphy,
geom_settings = sdram_module.geom_settings,
timing_settings = sdram_module.timing_settings,
controller_settings=controller_settings)
self.comb += [
self.ddrphy.clk4x_wr_strb.eq(self.crg.clk4x_wr_strb),
self.ddrphy.clk4x_rd_strb.eq(self.crg.clk4x_rd_strb),
]
# Basic peripherals ------------------------------------------------------------------------
# info module
self.submodules.info = info.Info(platform, self.__class__.__name__)
self.add_csr("info")
# control and status module
#self.submodules.cas = cas.ControlAndStatus(platform, sys_clk_freq)
self.add_csr("cas")
# Add debug interface if the CPU has one ---------------------------------------------------
if hasattr(self.cpu, "debug_bus"):
self.register_mem(
name="vexriscv_debug",
address=0xf00f0000,
interface=self.cpu.debug_bus,
size=0x100)
# Memory mapped SPI Flash ------------------------------------------------------------------
# TODO: Add SPI flash.
SoC = BaseSoC
| 38.729412 | 114 | 0.545869 |
from fractions import Fraction
from migen import *
from litex.soc.integration.soc_sdram import *
from litex.soc.integration.builder import *
from litex.soc.interconnect import wishbone
from litedram.modules import MT46H32M16
from litedram.phy import s6ddrphy
from litedram.core import ControllerSettings
from targets.utils import dict_set_max
from gateware import info
from gateware import spi_flash
from .crg import _CRG
class BaseSoC(SoCSDRAM):
mem_map = {**SoCSDRAM.mem_map, **{
'spiflash': 0x20000000,
}}
def __init__(self, platform, **kwargs):
if kwargs.get('cpu_type', None) == 'mor1kx':
dict_set_max(kwargs, 'integrated_rom_size', 0x10000)
else:
dict_set_max(kwargs, 'integrated_rom_size', 0x8000)
dict_set_max(kwargs, 'integrated_sram_size', 0x8000)
sys_clk_freq = (31 + Fraction(1, 4))*1000*1000
SoCSDRAM.__init__(self, platform, clk_freq=sys_clk_freq, **kwargs)
self.submodules.crg = _CRG(platform, sys_clk_freq)
self.platform.add_period_constraint(self.crg.cd_sys.clk, 1e9/sys_clk_freq)
if True:
sdram_module = MT46H32M16(sys_clk_freq, "1:2")
self.submodules.ddrphy = s6ddrphy.S6HalfRateDDRPHY(
platform.request("ddram"),
memtype = sdram_module.memtype,
rd_bitslip = 2,
wr_bitslip = 3,
dqs_ddr_alignment="C1")
self.add_csr("ddrphy")
controller_settings = ControllerSettings(
with_bandwidth=True)
self.register_sdram(
self.ddrphy,
geom_settings = sdram_module.geom_settings,
timing_settings = sdram_module.timing_settings,
controller_settings=controller_settings)
self.comb += [
self.ddrphy.clk4x_wr_strb.eq(self.crg.clk4x_wr_strb),
self.ddrphy.clk4x_rd_strb.eq(self.crg.clk4x_rd_strb),
]
self.submodules.info = info.Info(platform, self.__class__.__name__)
self.add_csr("info")
self.add_csr("cas")
if hasattr(self.cpu, "debug_bus"):
self.register_mem(
name="vexriscv_debug",
address=0xf00f0000,
interface=self.cpu.debug_bus,
size=0x100)
SoC = BaseSoC
| true | true |
1c3864a2202941f66300375795f86ea08723517c | 9,270 | py | Python | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_metadata_sync_configs_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 2,728 | 2015-01-09T10:19:32.000Z | 2022-03-31T14:50:33.000Z | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_metadata_sync_configs_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 17,773 | 2015-01-05T15:57:17.000Z | 2022-03-31T23:50:25.000Z | sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_sql_pool_metadata_sync_configs_operations.py | rsdoherty/azure-sdk-for-python | 6bba5326677468e6660845a703686327178bb7b1 | [
"MIT"
] | 1,916 | 2015-01-19T05:05:41.000Z | 2022-03-31T19:36:44.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SqlPoolMetadataSyncConfigsOperations(object):
"""SqlPoolMetadataSyncConfigsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.synapse.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
workspace_name, # type: str
sql_pool_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.MetadataSyncConfig"]
"""Get SQL pool metadata sync config.
Get the metadata sync configuration for a SQL pool.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param sql_pool_name: SQL pool name.
:type sql_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MetadataSyncConfig, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.MetadataSyncConfig or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.MetadataSyncConfig"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('MetadataSyncConfig', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/metadataSync/config'} # type: ignore
def create(
self,
resource_group_name, # type: str
workspace_name, # type: str
sql_pool_name, # type: str
metadata_sync_configuration, # type: "_models.MetadataSyncConfig"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.MetadataSyncConfig"]
"""Set SQL pool metadata sync config.
Set the metadata sync configuration for a SQL pool.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param sql_pool_name: SQL pool name.
:type sql_pool_name: str
:param metadata_sync_configuration: Metadata sync configuration.
:type metadata_sync_configuration: ~azure.mgmt.synapse.models.MetadataSyncConfig
:keyword callable cls: A custom type or function that will be passed the direct response
:return: MetadataSyncConfig, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.MetadataSyncConfig or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.MetadataSyncConfig"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(metadata_sync_configuration, 'MetadataSyncConfig')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('MetadataSyncConfig', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/metadataSync/config'} # type: ignore
| 48.789474 | 213 | 0.68123 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class SqlPoolMetadataSyncConfigsOperations(object):
    """Operations on a Synapse SQL pool's metadata sync configuration.

    Instances are created by the generated service client, which supplies the
    HTTP pipeline, configuration and the (de)serializers; do not instantiate
    this class directly.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def get(
        self,
        resource_group_name,
        workspace_name,
        sql_pool_name,
        **kwargs
    ):
        """Get the metadata sync configuration of a SQL pool.

        Returns the deserialized ``MetadataSyncConfig`` on HTTP 200 and
        ``None`` on HTTP 404; any other status raises ``HttpResponseError``.
        """
        cls = kwargs.pop('cls', None)
        # Statuses with dedicated exception types; callers may override the
        # mapping via the 'error_map' keyword argument.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-01"
        accept = "application/json"

        # Substitute the serialized path segments into the URL template.
        path_args = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
        }
        request_url = self._client.format_url(self.get.metadata['url'], **path_args)

        query_params = {
            'api-version': self._serialize.query("api_version", api_version, 'str'),
        }
        header_params = {
            'Accept': self._serialize.header("accept", accept, 'str'),
        }

        request = self._client.get(request_url, query_params, header_params)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        # 404 is accepted and yields a None result.
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('MetadataSyncConfig', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/metadataSync/config'}

    def create(
        self,
        resource_group_name,
        workspace_name,
        sql_pool_name,
        metadata_sync_configuration,
        **kwargs
    ):
        """Set the metadata sync configuration for a SQL pool.

        Serializes ``metadata_sync_configuration`` and PUTs it to the service.
        Returns the deserialized ``MetadataSyncConfig`` on HTTP 200, ``None``
        on HTTP 404; any other status raises ``HttpResponseError``.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        path_args = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
            'sqlPoolName': self._serialize.url("sql_pool_name", sql_pool_name, 'str'),
        }
        request_url = self._client.format_url(self.create.metadata['url'], **path_args)

        query_params = {
            'api-version': self._serialize.query("api_version", api_version, 'str'),
        }
        header_params = {
            'Content-Type': self._serialize.header("content_type", content_type, 'str'),
            'Accept': self._serialize.header("accept", accept, 'str'),
        }

        # Serialize the payload and issue the PUT through the client pipeline.
        body_content = self._serialize.body(metadata_sync_configuration, 'MetadataSyncConfig')
        request = self._client.put(request_url, query_params, header_params, content=body_content)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 404]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('MetadataSyncConfig', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/sqlPools/{sqlPoolName}/metadataSync/config'}
| true | true |
1c3864ebd82b1039ffbfd4ca1a853f561a39ca03 | 15,279 | py | Python | mol_dqn/experimental/multi_obj_opt.py | awesome-archive/google-research | 6b3c751abbf658b33fb03e51d7b84105d2dbea68 | [
"Apache-2.0"
] | 1 | 2019-02-16T12:17:14.000Z | 2019-02-16T12:17:14.000Z | mol_dqn/experimental/multi_obj_opt.py | awesome-archive/google-research | 6b3c751abbf658b33fb03e51d7b84105d2dbea68 | [
"Apache-2.0"
] | null | null | null | mol_dqn/experimental/multi_obj_opt.py | awesome-archive/google-research | 6b3c751abbf658b33fb03e51d7b84105d2dbea68 | [
"Apache-2.0"
] | 1 | 2019-01-27T13:03:12.000Z | 2019-01-27T13:03:12.000Z | # coding=utf-8
# Copyright 2018 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Maximizes the QED of the molecule while keep similarity.
Multi-Objective optimization using multiple Q functions.
Obj1: QED;
Obj2: similarity.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import json
import os
import random
import time
from absl import app
from absl import flags
from absl import logging
from baselines.common import schedules
from baselines.deepq import replay_buffer
import numpy as np
from rdkit import Chem
from rdkit import DataStructs
from rdkit.Chem import AllChem
from rdkit.Chem import Descriptors
from rdkit.Chem import QED
import tensorflow as tf
from tensorflow import gfile
from mol_dqn.chemgraph.mcts import deep_q_networks
from mol_dqn.chemgraph.mcts import molecules as molecules_mdp
from mol_dqn.chemgraph.py import molecules
from mol_dqn.chemgraph.tensorflow import core
# Command-line flags for the multi-objective (QED + similarity) DQN run:
# output location, optimization targets and the training schedule.
flags.DEFINE_string('model_dir',
                    '/namespace/gas/primary/zzp/dqn/r=3/exp2_bs_dqn',
                    'The directory to save data to.')
flags.DEFINE_string('target_molecule', 'C1CCC2CCCCC2C1',
                    'The SMILES string of the target molecule.')
flags.DEFINE_string('start_molecule', None,
                    'The SMILES string of the start molecule.')
flags.DEFINE_string('hparams', None, 'Filename for serialized HParams.')
flags.DEFINE_boolean('multi_objective', True,
                     'Whether to run multi objective DQN.')
flags.DEFINE_integer('num_episodes', 2000, 'num episodes')
flags.DEFINE_float('gamma', 0.999, 'discount')
# Parsed flag values, shared module-wide.
FLAGS = flags.FLAGS
class TargetWeightMolecule(molecules_mdp.Molecule):
  """Molecule MDP whose reward favors a particular molecular weight."""

  def __init__(self, target_weight, **kwargs):
    """Initializes the class.

    Args:
      target_weight: Float. the target molecular weight.
      **kwargs: The keyword arguments passed to the parent class.
    """
    super(TargetWeightMolecule, self).__init__(**kwargs)
    self.target_weight = target_weight

  def _reward(self):
    """Computes the reward of the current state.

    Returns:
      Float. The negative squared distance between the current molecular
      weight and the target molecular weight.
    """
    mol = Chem.MolFromSmiles(self._state)
    if mol is None:
      # Unparseable SMILES: worst achievable score for this objective.
      return -self.target_weight**2
    delta = Descriptors.MolWt(mol) - self.target_weight
    return -delta**2
class MultiObjectiveRewardMolecule(molecules_mdp.Molecule):
  """Molecule MDP with a 2-D reward vector.

  The reward is defined as reward = (similarity_score, qed_score).
  """

  def __init__(self, target_molecule, **kwargs):
    """Initializes the class.

    Args:
      target_molecule: SMILES string. the target molecule against which we
        calculate the similarity.
      **kwargs: The keyword arguments passed to the parent class.
    """
    super(MultiObjectiveRewardMolecule, self).__init__(**kwargs)
    target_mol = Chem.MolFromSmiles(target_molecule)
    self._target_mol_fingerprint = self.get_fingerprint(target_mol)
    self._target_mol_scaffold = molecules.get_scaffold(target_mol)
    # Two reward entries: (similarity, QED).
    self.reward_dim = 2

  def get_fingerprint(self, molecule):
    """Returns the radius-2 Morgan fingerprint of the given molecule.

    Args:
      molecule: Chem.Mol. The molecule to fingerprint.

    Returns:
      rdkit fingerprint object for `molecule`.
    """
    return AllChem.GetMorganFingerprint(molecule, radius=2)

  def get_similarity(self, smiles):
    """Returns the Tanimoto similarity between `smiles` and the target.

    Args:
      smiles: String. The SMILES string for the current molecule.

    Returns:
      Float. The Tanimoto similarity; 0.0 for unparseable SMILES.
    """
    mol = Chem.MolFromSmiles(smiles)
    if mol is None:
      return 0.0
    return DataStructs.TanimotoSimilarity(
        self._target_mol_fingerprint, self.get_fingerprint(mol))

  def _reward(self):
    """Computes the reward of the current state.

    Returns:
      A (similarity, qed) tuple, each entry discounted by the number of
      remaining steps in the episode. (0.0, 0.0) for invalid states.
    """
    if self._state is None:
      return 0.0, 0.0
    mol = Chem.MolFromSmiles(self._state)
    if mol is None:
      return 0.0, 0.0
    # Similarity only counts when the target's scaffold is preserved.
    if molecules.contains_scaffold(mol, self._target_mol_scaffold):
      similarity = self.get_similarity(self._state)
    else:
      similarity = 0.0
    qed_value = QED.qed(mol)
    # Discount both objectives identically by gamma^(steps remaining).
    discount = FLAGS.gamma**(self.max_steps - self._counter)
    return similarity * discount, qed_value * discount
# TODO(zzp): use the tf.estimator interface.
def run_training(hparams, environment, dqn):
  """Runs the training procedure.

  Briefly, the agent runs the action network to get an action to take in
  the environment. The state transition and reward are stored in the memory.
  Periodically the agent samples a batch of samples from the memory to
  update(train) its Q network. Note that the Q network and the action network
  share the same set of parameters, so the action network is also updated by
  the samples of (state, action, next_state, reward) batches.

  Args:
    hparams: tf.HParams. The hyper parameters of the model.
    environment: molecules.Molecule. The environment to run on.
    dqn: An instance of the DeepQNetwork class.

  Returns:
    None
  """
  summary_writer = tf.summary.FileWriter(FLAGS.model_dir)
  tf.reset_default_graph()
  with tf.Session() as sess:
    dqn.build()
    # Bug fix: the checkpoint saver lives in tf.train; `tf.Saver` does not
    # exist and raised AttributeError at runtime.
    model_saver = tf.train.Saver(max_to_keep=hparams.max_num_checkpoints)
    # The schedule for the epsilon in epsilon greedy policy.
    exploration = schedules.PiecewiseSchedule(
        [(0, 1.0), (int(hparams.num_episodes / 2), 0.1),
         (hparams.num_episodes, 0.01)],
        outside_value=0.01)
    if hparams.prioritized:
      memory = replay_buffer.PrioritizedReplayBuffer(hparams.replay_buffer_size,
                                                     hparams.prioritized_alpha)
      beta_schedule = schedules.LinearSchedule(
          hparams.num_episodes, initial_p=hparams.prioritized_beta, final_p=0)
    else:
      memory = replay_buffer.ReplayBuffer(hparams.replay_buffer_size)
      beta_schedule = None
    sess.run(tf.global_variables_initializer())
    sess.run(dqn.update_op)
    global_step = 0
    for episode in range(FLAGS.num_episodes * 6):
      # Sample a fresh similarity-vs-QED trade-off for every episode.
      sim_weight = random.random()
      dqn.objective_weight = np.array([[sim_weight], [1 - sim_weight]])
      logging.info('Episode %i, ObjWeight %s', episode,
                   str(dqn.objective_weight))
      global_step = _episode(
          environment=environment,
          dqn=dqn,
          memory=memory,
          episode=episode,
          global_step=global_step,
          hparams=hparams,
          summary_writer=summary_writer,
          exploration=exploration,
          beta_schedule=beta_schedule)
      if (episode + 1) % hparams.update_frequency == 0:
        sess.run(dqn.update_op)
      if (episode + 1) % hparams.save_frequency == 0:
        model_saver.save(
            sess,
            os.path.join(FLAGS.model_dir, 'ckpt'),
            global_step=global_step)
def _episode(environment, dqn, memory, episode, global_step, hparams,
             summary_writer, exploration, beta_schedule):
  """Plays one episode and performs periodic Q-network updates.

  Args:
    environment: molecules.Molecule; the environment to run on.
    dqn: DeepQNetwork used for estimating rewards.
    memory: ReplayBuffer used to store observations and rewards.
    episode: Integer episode number.
    global_step: Integer global step; the total number of steps across all
      episodes.
    hparams: HParams.
    summary_writer: FileWriter used for writing Summary protos.
    exploration: Schedule used for exploration in the environment.
    beta_schedule: Schedule used for prioritized replay buffers.

  Returns:
    Updated global_step.
  """
  start_time = time.time()
  environment.initialize()
  # Pick a random bootstrap head for the whole episode (0 when disabled).
  head = (np.random.randint(hparams.num_bootstrap_heads)
          if hparams.num_bootstrap_heads else 0)
  for step in range(hparams.max_steps_per_episode):
    result = _step(
        environment=environment,
        dqn=dqn,
        memory=memory,
        episode=episode,
        hparams=hparams,
        exploration=exploration,
        head=head)
    if step == hparams.max_steps_per_episode - 1:
      # Final step: log the episode's terminal molecule and reward.
      episode_summary = dqn.log_result(result.state, result.reward)
      summary_writer.add_summary(episode_summary, global_step)
      logging.info('Episode %d/%d took %gs', episode + 1, hparams.num_episodes,
                   time.time() - start_time)
      logging.info('SMILES: %s\n', result.state)
      # %s because the reward may be a tuple or a plain float.
      logging.info('The reward is: %s', str(result.reward))
    is_learning_step = (
        episode > min(50, hparams.num_episodes / 10)
        and global_step % hparams.learning_frequency == 0)
    if is_learning_step:
      if hparams.prioritized:
        (state_t, _, reward_t, state_tp1, done_mask, weight,
         indices) = memory.sample(
             hparams.batch_size, beta=beta_schedule.value(episode))
      else:
        (state_t, _, reward_t, state_tp1,
         done_mask) = memory.sample(hparams.batch_size)
        weight = np.ones([reward_t.shape[0]])
      # np.atleast_2d would prepend the new axis; we need it at axis 1.
      if reward_t.ndim == 1:
        reward_t = np.expand_dims(reward_t, axis=1)
      td_error, error_summary, _ = dqn.train(
          states=state_t,
          rewards=reward_t,
          next_states=state_tp1,
          done=np.expand_dims(done_mask, axis=1),
          weight=np.expand_dims(weight, axis=1))
      summary_writer.add_summary(error_summary, global_step)
      logging.info('Current TD error: %.4f', np.mean(np.abs(td_error)))
      if hparams.prioritized:
        memory.update_priorities(
            indices,
            np.abs(np.squeeze(td_error) + hparams.prioritized_epsilon).tolist())
    global_step += 1
  return global_step
def _step(environment, dqn, memory, episode, hparams, exploration, head):
  """Chooses one action epsilon-greedily, applies it, and stores the transition.

  Args:
    environment: molecules.Molecule; the environment to run on.
    dqn: DeepQNetwork used for estimating rewards.
    memory: ReplayBuffer used to store observations and rewards.
    episode: Integer episode number.
    hparams: HParams.
    exploration: Schedule used for exploration in the environment.
    head: Integer index of the DeepQNetwork head to use.

  Returns:
    molecules.Result object containing the result of the step.
  """
  steps_left = hparams.max_steps_per_episode - environment.num_steps_taken
  # Encode every candidate next state as fingerprint + steps-remaining feature.
  valid_actions = list(environment.get_valid_actions())
  observations = np.vstack([
      np.append(deep_q_networks.get_fingerprint(act, hparams), steps_left)
      for act in valid_actions
  ])
  chosen_index = dqn.get_action(
      observations, head=head, update_epsilon=exploration.value(episode))
  action = valid_actions[chosen_index]
  result = environment.step(action)
  next_action_fingerprints = np.vstack([
      np.append(deep_q_networks.get_fingerprint(act, hparams), steps_left)
      for act in environment.get_valid_actions()
  ])
  # obs_t already encodes the chosen action's fingerprint, so the stored
  # action index is irrelevant (always 0).
  memory.add(
      obs_t=np.append(
          deep_q_networks.get_fingerprint(action, hparams), steps_left),
      action=0,
      reward=result.reward,
      obs_tp1=next_action_fingerprints,
      done=float(result.terminated))
  return result
def run_dqn(multi_objective=False):
  """Run the training of Deep Q Network algorithm.

  Args:
    multi_objective: Boolean. Whether to run the multiobjective DQN.
  """
  if FLAGS.hparams is not None:
    with gfile.Open(FLAGS.hparams, 'r') as f:
      hparams = deep_q_networks.get_hparams(**json.load(f))
  else:
    hparams = deep_q_networks.get_hparams()
  # Bug fix: dict.iteritems() is Python-2-only; items() works on both 2 and 3.
  logging.info(
      'HParams:\n%s', '\n'.join([
          '\t%s: %s' % (key, value)
          for key, value in sorted(hparams.values().items())
      ]))
  # TODO(zzp): merge single objective DQN to multi objective DQN.
  if multi_objective:
    environment = MultiObjectiveRewardMolecule(
        target_molecule=FLAGS.target_molecule,
        atom_types=set(hparams.atom_types),
        init_mol=FLAGS.start_molecule,
        allow_removal=hparams.allow_removal,
        allow_no_modification=hparams.allow_no_modification,
        allow_bonds_between_rings=False,
        allowed_ring_sizes={3, 4, 5, 6},
        max_steps=hparams.max_steps_per_episode)
    dqn = deep_q_networks.MultiObjectiveDeepQNetwork(
        objective_weight=np.array([[0.5], [0.5]]),
        input_shape=(hparams.batch_size, hparams.fingerprint_length + 1),
        q_fn=functools.partial(
            deep_q_networks.multi_layer_model, hparams=hparams),
        optimizer=hparams.optimizer,
        grad_clipping=hparams.grad_clipping,
        num_bootstrap_heads=hparams.num_bootstrap_heads,
        gamma=hparams.gamma,
        epsilon=1.0)
  else:
    # NOTE(review): FLAGS.target_weight is not defined in this module; the
    # single-objective path assumes the flag is registered elsewhere -- confirm
    # before running with multi_objective=False.
    environment = TargetWeightMolecule(
        target_weight=FLAGS.target_weight,
        atom_types=set(hparams.atom_types),
        init_mol=FLAGS.start_molecule,
        allow_removal=hparams.allow_removal,
        allow_no_modification=hparams.allow_no_modification,
        allow_bonds_between_rings=hparams.allow_bonds_between_rings,
        allowed_ring_sizes=set(hparams.allowed_ring_sizes),
        max_steps=hparams.max_steps_per_episode)
    dqn = deep_q_networks.DeepQNetwork(
        input_shape=(hparams.batch_size, hparams.fingerprint_length + 1),
        q_fn=functools.partial(
            deep_q_networks.multi_layer_model, hparams=hparams),
        optimizer=hparams.optimizer,
        grad_clipping=hparams.grad_clipping,
        num_bootstrap_heads=hparams.num_bootstrap_heads,
        gamma=hparams.gamma,
        epsilon=1.0)
  run_training(
      hparams=hparams,
      environment=environment,
      dqn=dqn,
  )
  core.write_hparams(hparams, os.path.join(FLAGS.model_dir, 'config.json'))
def main(argv):
  """absl entry point; dispatches to the DQN training run."""
  del argv  # unused.
  run_dqn(FLAGS.multi_objective)
if __name__ == '__main__':
  app.run(main)
| 35.532558 | 80 | 0.70214 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import json
import os
import random
import time
from absl import app
from absl import flags
from absl import logging
from baselines.common import schedules
from baselines.deepq import replay_buffer
import numpy as np
from rdkit import Chem
from rdkit import DataStructs
from rdkit.Chem import AllChem
from rdkit.Chem import Descriptors
from rdkit.Chem import QED
import tensorflow as tf
from tensorflow import gfile
from mol_dqn.chemgraph.mcts import deep_q_networks
from mol_dqn.chemgraph.mcts import molecules as molecules_mdp
from mol_dqn.chemgraph.py import molecules
from mol_dqn.chemgraph.tensorflow import core
# Command-line flags for the multi-objective (QED + similarity) DQN run:
# output location, optimization targets and the training schedule.
flags.DEFINE_string('model_dir',
                    '/namespace/gas/primary/zzp/dqn/r=3/exp2_bs_dqn',
                    'The directory to save data to.')
flags.DEFINE_string('target_molecule', 'C1CCC2CCCCC2C1',
                    'The SMILES string of the target molecule.')
flags.DEFINE_string('start_molecule', None,
                    'The SMILES string of the start molecule.')
flags.DEFINE_string('hparams', None, 'Filename for serialized HParams.')
flags.DEFINE_boolean('multi_objective', True,
                     'Whether to run multi objective DQN.')
flags.DEFINE_integer('num_episodes', 2000, 'num episodes')
flags.DEFINE_float('gamma', 0.999, 'discount')
# Parsed flag values, shared module-wide.
FLAGS = flags.FLAGS
class TargetWeightMolecule(molecules_mdp.Molecule):
  """Molecule MDP subclass that rewards proximity to a target molecular weight."""
  def __init__(self, target_weight, **kwargs):
    """Initializes the class.

    Args:
      target_weight: Float. The target molecular weight.
      **kwargs: The keyword arguments passed to the parent class.
    """
    super(TargetWeightMolecule, self).__init__(**kwargs)
    self.target_weight = target_weight
  def _reward(self):
    """Returns the negative squared distance to the target molecular weight."""
    molecule = Chem.MolFromSmiles(self._state)
    if molecule is None:
      # Unparseable SMILES: return the lowest achievable reward.
      return -self.target_weight**2
    return -(Descriptors.MolWt(molecule) - self.target_weight)**2
class MultiObjectiveRewardMolecule(molecules_mdp.Molecule):
  """Molecule MDP whose reward is a 2-D vector: (similarity, QED)."""
  def __init__(self, target_molecule, **kwargs):
    """Initializes the class.

    Args:
      target_molecule: SMILES string. The molecule against which similarity
        is measured.
      **kwargs: The keyword arguments passed to the parent class.
    """
    super(MultiObjectiveRewardMolecule, self).__init__(**kwargs)
    target_molecule = Chem.MolFromSmiles(target_molecule)
    self._target_mol_fingerprint = self.get_fingerprint(target_molecule)
    self._target_mol_scaffold = molecules.get_scaffold(target_molecule)
    # Two reward entries: (similarity_score, qed_score).
    self.reward_dim = 2
  def get_fingerprint(self, molecule):
    """Returns the radius-2 Morgan fingerprint of the given molecule."""
    return AllChem.GetMorganFingerprint(molecule, radius=2)
  def get_similarity(self, smiles):
    """Returns the Tanimoto similarity between `smiles` and the target."""
    structure = Chem.MolFromSmiles(smiles)
    if structure is None:
      # Unparseable SMILES contributes zero similarity.
      return 0.0
    fingerprint_structure = self.get_fingerprint(structure)
    return DataStructs.TanimotoSimilarity(self._target_mol_fingerprint,
                                          fingerprint_structure)
  def _reward(self):
    """Returns (discounted similarity, discounted QED) for the current state.

    Similarity is zeroed unless the current molecule contains the target's
    scaffold; invalid states yield (0.0, 0.0).
    """
    if self._state is None:
      return 0.0, 0.0
    mol = Chem.MolFromSmiles(self._state)
    if mol is None:
      return 0.0, 0.0
    if molecules.contains_scaffold(mol, self._target_mol_scaffold):
      similarity_score = self.get_similarity(self._state)
    else:
      similarity_score = 0.0
    qed_value = QED.qed(mol)
    # Both objectives are discounted by gamma^(steps remaining).
    return similarity_score * FLAGS.gamma**(
        self.max_steps - self._counter), qed_value * FLAGS.gamma**(
            self.max_steps - self._counter)
def run_training(hparams, environment, dqn):
  """Runs the DQN training loop with randomly sampled objective weights.

  Each episode samples a fresh similarity-vs-QED weighting, runs one episode
  via `_episode`, and periodically syncs the target network and saves
  checkpoints.

  Args:
    hparams: tf.HParams. The hyper parameters of the model.
    environment: molecules.Molecule. The environment to run on.
    dqn: An instance of the DeepQNetwork class.
  """
  summary_writer = tf.summary.FileWriter(FLAGS.model_dir)
  tf.reset_default_graph()
  with tf.Session() as sess:
    dqn.build()
    # Bug fix: the checkpoint saver lives in tf.train; `tf.Saver` does not
    # exist and raised AttributeError at runtime.
    model_saver = tf.train.Saver(max_to_keep=hparams.max_num_checkpoints)
    # Epsilon-greedy exploration schedule: 1.0 -> 0.1 -> 0.01.
    exploration = schedules.PiecewiseSchedule(
        [(0, 1.0), (int(hparams.num_episodes / 2), 0.1),
         (hparams.num_episodes, 0.01)],
        outside_value=0.01)
    if hparams.prioritized:
      memory = replay_buffer.PrioritizedReplayBuffer(hparams.replay_buffer_size,
                                                     hparams.prioritized_alpha)
      beta_schedule = schedules.LinearSchedule(
          hparams.num_episodes, initial_p=hparams.prioritized_beta, final_p=0)
    else:
      memory = replay_buffer.ReplayBuffer(hparams.replay_buffer_size)
      beta_schedule = None
    sess.run(tf.global_variables_initializer())
    sess.run(dqn.update_op)
    global_step = 0
    for episode in range(FLAGS.num_episodes * 6):
      # Randomize the similarity/QED trade-off every episode.
      sim_weight = random.random()
      dqn.objective_weight = np.array([[sim_weight], [1 - sim_weight]])
      logging.info('Episode %i, ObjWeight %s', episode,
                   str(dqn.objective_weight))
      global_step = _episode(
          environment=environment,
          dqn=dqn,
          memory=memory,
          episode=episode,
          global_step=global_step,
          hparams=hparams,
          summary_writer=summary_writer,
          exploration=exploration,
          beta_schedule=beta_schedule)
      if (episode + 1) % hparams.update_frequency == 0:
        sess.run(dqn.update_op)
      if (episode + 1) % hparams.save_frequency == 0:
        model_saver.save(
            sess,
            os.path.join(FLAGS.model_dir, 'ckpt'),
            global_step=global_step)
def _episode(environment, dqn, memory, episode, global_step, hparams,
             summary_writer, exploration, beta_schedule):
  """Runs a single episode and performs periodic training updates.

  Args:
    environment: molecules.Molecule; the environment to run on.
    dqn: DeepQNetwork used for estimating rewards.
    memory: ReplayBuffer used to store observations and rewards.
    episode: Integer episode number.
    global_step: Integer global step; the total number of steps across all
      episodes.
    hparams: HParams.
    summary_writer: FileWriter used for writing Summary protos.
    exploration: Schedule used for exploration in the environment.
    beta_schedule: Schedule used for prioritized replay buffers.

  Returns:
    Updated global_step.
  """
  episode_start_time = time.time()
  environment.initialize()
  # Pick a random bootstrap head for the whole episode (0 when disabled).
  if hparams.num_bootstrap_heads:
    head = np.random.randint(hparams.num_bootstrap_heads)
  else:
    head = 0
  for step in range(hparams.max_steps_per_episode):
    result = _step(
        environment=environment,
        dqn=dqn,
        memory=memory,
        episode=episode,
        hparams=hparams,
        exploration=exploration,
        head=head)
    if step == hparams.max_steps_per_episode - 1:
      # Final step: log the episode's terminal molecule and reward.
      episode_summary = dqn.log_result(result.state, result.reward)
      summary_writer.add_summary(episode_summary, global_step)
      logging.info('Episode %d/%d took %gs', episode + 1, hparams.num_episodes,
                   time.time() - episode_start_time)
      logging.info('SMILES: %s\n', result.state)
      # %s because the reward may be a tuple or a plain float.
      logging.info('The reward is: %s', str(result.reward))
    if (episode > min(50, hparams.num_episodes / 10)) and (
        global_step % hparams.learning_frequency == 0):
      if hparams.prioritized:
        (state_t, _, reward_t, state_tp1, done_mask, weight,
         indices) = memory.sample(
             hparams.batch_size, beta=beta_schedule.value(episode))
      else:
        (state_t, _, reward_t, state_tp1,
         done_mask) = memory.sample(hparams.batch_size)
        weight = np.ones([reward_t.shape[0]])
      # np.atleast_2d would prepend the new axis; we need it at axis 1.
      if reward_t.ndim == 1:
        reward_t = np.expand_dims(reward_t, axis=1)
      td_error, error_summary, _ = dqn.train(
          states=state_t,
          rewards=reward_t,
          next_states=state_tp1,
          done=np.expand_dims(done_mask, axis=1),
          weight=np.expand_dims(weight, axis=1))
      summary_writer.add_summary(error_summary, global_step)
      logging.info('Current TD error: %.4f', np.mean(np.abs(td_error)))
      if hparams.prioritized:
        memory.update_priorities(
            indices,
            np.abs(np.squeeze(td_error) + hparams.prioritized_epsilon).tolist())
    global_step += 1
  return global_step
def _step(environment, dqn, memory, episode, hparams, exploration, head):
  """Chooses one action epsilon-greedily, applies it, and stores the transition.

  Args:
    environment: molecules.Molecule; the environment to run on.
    dqn: DeepQNetwork used for estimating rewards.
    memory: ReplayBuffer used to store observations and rewards.
    episode: Integer episode number.
    hparams: HParams.
    exploration: Schedule used for exploration in the environment.
    head: Integer index of the DeepQNetwork head to use.

  Returns:
    molecules.Result object containing the result of the step.
  """
  steps_left = hparams.max_steps_per_episode - environment.num_steps_taken
  # Encode every candidate next state as fingerprint + steps-remaining feature.
  valid_actions = list(environment.get_valid_actions())
  observations = np.vstack([
      np.append(deep_q_networks.get_fingerprint(act, hparams), steps_left)
      for act in valid_actions
  ])
  action = valid_actions[dqn.get_action(
      observations, head=head, update_epsilon=exploration.value(episode))]
  result = environment.step(action)
  action_fingerprints = np.vstack([
      np.append(deep_q_networks.get_fingerprint(act, hparams), steps_left)
      for act in environment.get_valid_actions()
  ])
  # obs_t already encodes the chosen action's fingerprint, so the stored
  # action index is irrelevant (always 0).
  memory.add(
      obs_t=np.append(
          deep_q_networks.get_fingerprint(action, hparams), steps_left),
      action=0,
      reward=result.reward,
      obs_tp1=action_fingerprints,
      done=float(result.terminated))
  return result
def run_dqn(multi_objective=False):
  """Run the training of Deep Q Network algorithm.

  Args:
    multi_objective: Boolean. Whether to run the multiobjective DQN.
  """
  if FLAGS.hparams is not None:
    with gfile.Open(FLAGS.hparams, 'r') as f:
      hparams = deep_q_networks.get_hparams(**json.load(f))
  else:
    hparams = deep_q_networks.get_hparams()
  # Bug fix: dict.iteritems() is Python-2-only; items() works on both 2 and 3.
  logging.info(
      'HParams:\n%s', '\n'.join([
          '\t%s: %s' % (key, value)
          for key, value in sorted(hparams.values().items())
      ]))
  if multi_objective:
    environment = MultiObjectiveRewardMolecule(
        target_molecule=FLAGS.target_molecule,
        atom_types=set(hparams.atom_types),
        init_mol=FLAGS.start_molecule,
        allow_removal=hparams.allow_removal,
        allow_no_modification=hparams.allow_no_modification,
        allow_bonds_between_rings=False,
        allowed_ring_sizes={3, 4, 5, 6},
        max_steps=hparams.max_steps_per_episode)
    dqn = deep_q_networks.MultiObjectiveDeepQNetwork(
        objective_weight=np.array([[0.5], [0.5]]),
        input_shape=(hparams.batch_size, hparams.fingerprint_length + 1),
        q_fn=functools.partial(
            deep_q_networks.multi_layer_model, hparams=hparams),
        optimizer=hparams.optimizer,
        grad_clipping=hparams.grad_clipping,
        num_bootstrap_heads=hparams.num_bootstrap_heads,
        gamma=hparams.gamma,
        epsilon=1.0)
  else:
    # NOTE(review): FLAGS.target_weight is not defined in this module; the
    # single-objective path assumes the flag is registered elsewhere -- confirm
    # before running with multi_objective=False.
    environment = TargetWeightMolecule(
        target_weight=FLAGS.target_weight,
        atom_types=set(hparams.atom_types),
        init_mol=FLAGS.start_molecule,
        allow_removal=hparams.allow_removal,
        allow_no_modification=hparams.allow_no_modification,
        allow_bonds_between_rings=hparams.allow_bonds_between_rings,
        allowed_ring_sizes=set(hparams.allowed_ring_sizes),
        max_steps=hparams.max_steps_per_episode)
    dqn = deep_q_networks.DeepQNetwork(
        input_shape=(hparams.batch_size, hparams.fingerprint_length + 1),
        q_fn=functools.partial(
            deep_q_networks.multi_layer_model, hparams=hparams),
        optimizer=hparams.optimizer,
        grad_clipping=hparams.grad_clipping,
        num_bootstrap_heads=hparams.num_bootstrap_heads,
        gamma=hparams.gamma,
        epsilon=1.0)
  run_training(
      hparams=hparams,
      environment=environment,
      dqn=dqn,
  )
  core.write_hparams(hparams, os.path.join(FLAGS.model_dir, 'config.json'))
def main(argv):
  """absl entry point; dispatches to the DQN training run."""
  del argv  # Unused.
  run_dqn(FLAGS.multi_objective)
if __name__ == '__main__':
  app.run(main)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.